#! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Copyright 2022, Nils Hilbricht, Germany ( https://www.hilbricht.net )

This file is part of the Laborejo Software Suite ( https://www.laborejo.org ),

This application is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""

import logging; logger = logging.getLogger(__name__); logger.info("import")

#Python Standard Lib
import configparser
import pathlib
import tarfile
from io import TextIOWrapper

#Third Party
from calfbox import cbox

#Template Modules
from template.engine.data import Data as TemplateData
from template.start import PATHS

#Our Modules
from engine.instrument import Instrument
from engine.auditioner import Auditioner


class Data(TemplateData):
    """There must always be a Data class in a file main.py.

    The main data is in:
    self.libraries = {} # libraryId:int : Library-object

    This is created on program startup and never modified afterwards
    (except internal instrument changes of course).

    Throughout the program we identify instruments with these unique values:

    * libraryId : integer, no zero-padding. One for each tar file.
    * instrumentId : integer, no zero-padding. Unique only within a tar file.
    * variant : string. An .sfz file name. Can use all characters allowed in a
      linux file name, including spaces. Case sensitive.
    """
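
    #For illustration only (the numbers and file name are invented, not taken from
    #a real library): an instrument with id 14 in library 2 is reached as
    #self.libraries[2].instruments[14], is identified throughout the program by the
    #pair (2, 14), and a variant is a plain .sfz file name such as "Sustain Vibrato.sfz".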

    def __init__(self, parentSession):
        #Program start.
        super().__init__(parentSession)
        session = self.parentSession #self.parentSession is already defined in template.data. We just want to work conveniently in init with it by creating a local var.
        self.libraries = {} # libraryId:int : Library-object
        self._processAfterInit()

    def _processAfterInit(self):
        session = self.parentSession #We just want to work conveniently in init with it by creating a local var.
        self.cachedSerializedDataForStartEngine = None

    def allInstr(self):
        for libId, lib in self.libraries.items():
            for instrId, instr in lib.instruments.items():
                yield instr

    def parseAndLoadInstrumentLibraries(self, baseSamplePath, instrumentMidiNoteOnActivity):
        """Called first by api.startEngine, which receives the global sample path from the GUI.
        Later called by the sample dir rescan."""

        #Since this is a function called by the user, or at least the GUI,
        #we do some error checking.
        if not baseSamplePath:
            raise ValueError(f"Wrong format for argument baseSamplePath. Should be a path-string or path-like object but was: {baseSamplePath}")
        basePath = pathlib.Path(baseSamplePath)
        if not basePath.exists():
            raise OSError(f"{basePath} does not exist to load samples from.")
        if not basePath.is_dir():
            raise OSError(f"{basePath} is not a directory.")

        firstRun = not self.libraries
        if firstRun: #in case of re-scan we don't need to do this a second time. The default lib cannot be updated through the download manager and will always be present.
            s = pathlib.Path(PATHS["share"])
            defaultLibraryPath = s.joinpath("000 - Default.tar")
            logger.info(f"Loading Default Instrument Library from {defaultLibraryPath}. This message must only appear once in the log.")
            defaultLib = Library(parentData=self, tarFilePath=defaultLibraryPath) #If this fails we let the program crash. The default samples must exist and be accessible.
            self.libraries[defaultLib.id] = defaultLib
            assert defaultLib.id == 0, defaultLib.id

        defaultLib = self.libraries[0]

        #Remember the current libraries, in case of a rescan, so we can see which ones were deleted in the meantime.
        libsToDelete = set(self.libraries.keys()) #ints
        libsToDelete.remove(defaultLib.id)

        logger.info(f"Start opening and parsing instrument metadata from {baseSamplePath}")
        for f in basePath.glob('*.tar'):
            if f.is_file() and f.suffix == ".tar":
                #First load the library (this is .ini parsing, not sample loading, so it is cheap) and create a library object.
                #It will not create jack ports.
                try:
                    lib = Library(parentData=self, tarFilePath=f)
                except PermissionError as e:
                    logger.error(f"Library {f} could not be loaded. The reason follows: {e}")
                    continue

                #Then compare if this is actually a file we already knew:
                #we loaded this before and it still exists. We will NOT delete it below.
                if lib.id in libsToDelete:
                    libsToDelete.remove(lib.id)

                #If this is a completely new lib it is simple: just load.
                if not lib.id in self.libraries:
                    assert not lib.id in libsToDelete, (lib.id, libsToDelete)
                    self.libraries[lib.id] = lib
                else: #we already know this id
                    oldLib = self.libraries[lib.id]
                    if lib.tarFilePath == oldLib.tarFilePath or lib.tarFilePath.samefile(oldLib.tarFilePath):
                        #Same id, same file path, or (sym)link. We update the old instrument and maybe there are new variants to load.
                        #Loaded state will remain the same. Tembro instruments don't change with updates.
                        self.libraries[lib.id].updateWithNewParse(lib)
                    else:
                        #Same id, different file path. We treat it as a different lib and unload/reload completely.
                        lib.transferOldState(oldLib) #at least reactivate the already loaded instruments.
                        self._unloadLibrary(lib.id) #remove old lib instance
                        self.libraries[lib.id] = lib #this is the new lib instance.

        logger.info(f"Finished loading samples from {baseSamplePath}")

        #There might still be loaded libraries left that were not present in the file parsing above.
        #These are the libs whose files have been removed by the user during runtime. We unload and remove them as well.
        if len(libsToDelete) > 0:
            logger.info(f"Start removing {len(libsToDelete)} libraries that were removed from {baseSamplePath}")
            for libIdToDel in libsToDelete:
                self._unloadLibrary(libIdToDel)
            logger.info("Finished removing deleted libraries.")

        if not self.libraries: #TODO: Is this still valid with the guaranteed 000 - Default.tar?
            logger.error("There were no sample libraries to parse! This is correct on an empty run, since you still need to choose a sample directory.")

        self.instrumentMidiNoteOnActivity = instrumentMidiNoteOnActivity #The api will inject a callback function here which takes (libId, instrId) as parameters to indicate midi noteOn activity, for non-critical information like a GUI LED blinking or checking for new keyswitch states. The instrument's individual midiprocessor will call this as a parent-call.

        if firstRun: #in case of re-scan we don't need to do this a second time. The default lib cannot be updated through the download manager and will always be present.
            self._createGlobalPorts() #in its own function for readability
            self._createCachedJackMetadataSorting()

    def _unloadLibrary(self, libIdToDel):
        for instrId, instrObj in self.libraries[libIdToDel].instruments.items():
            if instrObj.enabled: #unload if necessary
                instrObj.disable()
        del self.libraries[libIdToDel] #garbage collect all children instruments as well

    def _createGlobalPorts(self):
        """Create two mixer ports, for stereo. Each instrument will not only create
        its own jack out ports but also connect to these left/right.
        If we are not in an NSM Session auto-connect them to the system ports for convenience.

        Also create an additional stereo port pair to pre-listen to one sample instrument alone, the Auditioner."""
        assert not self.parentSession.standaloneMode is None
        if self.parentSession.standaloneMode:
            self.lmixUuid = cbox.JackIO.create_audio_output('left_mix', "#1") #add "#1" as second parameter for auto-connection to system out 1
            self.rmixUuid = cbox.JackIO.create_audio_output('right_mix', "#2") #add "#2" as second parameter for auto-connection to system out 2
        else:
            self.lmixUuid = cbox.JackIO.create_audio_output('left_mix')
            self.rmixUuid = cbox.JackIO.create_audio_output('right_mix')

        self.auditioner = Auditioner(self)

    def exportMetadata(self)->dict:
        """Data we send in callbacks.
        This is the initial 'build-the-instrument-database' function.
        Each first-level dict contains another dict with instruments, but also a
        special key "library" that holds the metadata for the lib itself."""
        result = {}
        for libId, libObj in self.libraries.items():
            result[libId] = libObj.exportMetadata() #also a dict. Contains a special key "library" which holds the library metadata itself
        return result
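
    #For illustration: a hedged sketch of the nested dict exportMetadata returns.
    #The ids and the instrument payloads are invented; the real keys and values
    #come from Library.exportMetadata() and Instrument.exportMetadata():
    #
    #   {0: {"library": {"id": 0, "name": ..., "version": ...},
    #        1: {...instrument 1 metadata...},
    #        2: {...instrument 2 metadata...}},
    #    1: {...next library...}}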

    def _createCachedJackMetadataSorting(self):
        """Calculate once, per program start, what port order we would have if all
        instruments were loaded. In reality they are not, but we can still use this
        cache instead of dynamically creating a port order.

        Needs to be called after parsing the tar/ini files."""
        #order = {portName:index for index, portName in enumerate(track.sequencerInterface.cboxPortName() for track in self.tracks if track.sequencerInterface.cboxMidiOutUuid)}
        highestLibId = max(self.libraries.keys())
        highestInstrId = max(inst.id for inst in self.allInstr())
        clientName = cbox.JackIO.status().client_name
        order = {}
        orderCounter = 0
        for instr in self.allInstr():
            L = clientName + ":" + instr.midiInputPortName + "_L"
            R = clientName + ":" + instr.midiInputPortName + "_R"
            order[L] = (orderCounter, instr)
            orderCounter += 1
            order[R] = (orderCounter, instr)
            orderCounter += 1
            order[clientName + ":" + instr.midiInputPortName] = (orderCounter, instr) #midi port
            orderCounter += 1
        self._cachedJackMedataPortOrder = order

    def updateJackMetadataSorting(self):
        """Tell cbox to reorder the tracks by metadata.
        We need this every time we enable/disable an instrument, which adds/removes the jack ports.

        Luckily our data never changes. We can just prepare one order, cache it,
        filter it and send that again and again."""
        cleanedOrder = {fullportname: index for fullportname, (index, instrObj) in self._cachedJackMedataPortOrder.items() if instrObj.enabled}
        try:
            cbox.JackIO.Metadata.set_all_port_order(cleanedOrder) #wants a dict with {complete jack portname : sortIndex}
        except Exception as e: #No Jack Meta Data or Error with ports.
            logger.error(e)
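
    #A hedged example of the cleaned order sent to cbox.JackIO.Metadata.set_all_port_order().
    #The client and port names are invented for illustration; the real names are built
    #above from the jack client name and each instrument's midiInputPortName:
    #
    #   {"tembro:Violin_L": 0, "tembro:Violin_R": 1, "tembro:Violin": 2}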

    #Save / Load
    def serialize(self)->dict:
        return {
            "libraries" : [libObj.serialize() for libObj in self.libraries.values()],
        }

    @classmethod
    def instanceFromSerializedData(cls, parentSession, serializedData):
        """As an experiment we try to load everything from this function alone
        and not create a function hierarchy. Save is in a hierarchy though.

        This differs from other LSS programs in that most data is just static stuff;
        we delay loading until everything is set up and just deposit the saved data
        here for the startEngine call to use."""
        self = cls.__new__(cls)
        self.session = parentSession
        self.parentSession = parentSession
        self._processAfterInit()
        self.cachedSerializedDataForStartEngine = serializedData
        return self

    def loadCachedSerializedData(self):
        """Called by api.startEngine after all static data libraries are loaded,
        without jack ports or instrument samples loaded. This is the same as an empty
        session without a save file.
        This way the callbacks have a chance to load instruments with feedback status."""
        assert self.cachedSerializedDataForStartEngine
        serializedData = self.cachedSerializedDataForStartEngine

        for libSerialized in serializedData["libraries"]:
            libObj = self.libraries[libSerialized["id"]]
            for instrSerialized in libSerialized["instruments"]:
                instObj = libObj.instruments[instrSerialized["id"]]
                instObj.startVariantSfzFilename = instrSerialized["currentVariant"]
                if instrSerialized["currentVariant"]:
                    instObj.loadSamples() #will use startVariantSfzFilename to set the currentVariant

                if not instrSerialized["mixerEnabled"] is None: #can be True/False. None for "never touched" or "instrument not loaded"
                    assert instrSerialized["currentVariant"] #mixer is auto-disabled when the instrument is deactivated
                    instObj.setMixerEnabled(instrSerialized["mixerEnabled"])

                if not instrSerialized["mixerLevel"] is None: #could be 0. None for "never touched" or "instrument not loaded"
                    assert instrSerialized["currentVariant"] #mixerLevel is None when no instrument is loaded. mixerEnabled can be False though.
                    instObj.mixerLevel = instrSerialized["mixerLevel"]
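
    #For illustration: a hedged sketch of the serialized save data consumed above.
    #The values are invented; the real structure is produced by Data.serialize()
    #together with Library.serialize() and the instruments' own serialize():
    #
    #   {"libraries": [
    #       {"id": 0,
    #        "instruments": [
    #            {"id": 1, "currentVariant": "Sustain.sfz",
    #             "mixerEnabled": True, "mixerLevel": -3.0},
    #       ]},
    #   ]}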


class Library(object):
    """Open a .tar library and extract information without actually loading any samples.
    This is for GUI data etc.

    You get all metadata from this. The samples are not loaded when Library() returns.
    The API can loop over self.allInstr() and call instr.loadSamples() and send feedback to callbacks.

    There is also a shortcut. First only an external .ini is loaded, which is much faster
    than unpacking the tar. If no additional data like images is needed (which is always
    the case in this version 1.0) we parse this external ini directly to build our database.
    """

    def __init__(self, parentData, tarFilePath):
        self.parentData = parentData
        self.tarFilePath = tarFilePath #pathlib.Path()
        if not tarFilePath.suffix == ".tar":
            raise RuntimeError(f"Wrong file {tarFilePath}")

        self.instruments = {} # instrId : Instrument()

        logger.info(f"Parsing {tarFilePath}")

        needTarData = True
        if needTarData:
            with tarfile.open(name=tarFilePath, mode='r:') as opentarfile: #PermissionErrors are caught by the constructing line in main/Data above
                iniFileObject = TextIOWrapper(opentarfile.extractfile("library.ini"))
                self.config = configparser.ConfigParser()
                self.config.read_file(iniFileObject) #self.config is permanent now. We can close the file object

                """
                #Extract an image file. But only if it exists.
                #tarfile.getmember is basically an exists-check that throws KeyError if not.
                try:
                    imageAsBytes = opentarfile.extractfile("logo.png").read() #Qt can handle the format
                except KeyError: #file not found
                    imageAsBytes = None
                """
        else:
            #TODO: This is permanently deactivated. Could be used in the future to load an
            #extracted-to-cache version of the ini file. Speedup is significant, but the
            #files get inconvenient to download.
            iniName = str(tarFilePath)[:-4] + ".ini"
            self.config = configparser.ConfigParser()
            self.config.read(iniName)

        self.id = int(self.config["library"]["id"])
        instrumentSections = self.config.sections()
        instrumentSections.remove("library")
        self.instrumentsInLibraryCount = len(instrumentSections)

        for iniSection in instrumentSections:
            instrId = int(self.config[iniSection]["id"])
            instrObj = Instrument(self, self.id, self.config[iniSection], tarFilePath)
            instrObj.instrumentsInLibraryCount = self.instrumentsInLibraryCount
            self.instruments[instrId] = instrObj

        #We only parsed the metadata here. No instruments are loaded yet.
        #At a later point Instrument.loadSamples() must be called. This is done in the API etc.
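
    #For illustration: a hedged, minimal sketch of a library.ini as the constructor
    #expects it. The section name "Violin" and all values are invented; the keys
    #"id", "name", "description", "license", "vendor" and "version" are the ones
    #read in this class, and every non-"library" section describes one instrument:
    #
    #   [library]
    #   id = 2
    #   name = Example Strings
    #   description = ...
    #   license = ...
    #   vendor = ...
    #   version = 1
    #
    #   [Violin]
    #   id = 1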

    def updateWithNewParse(self, newLib): #newLib is of type Library, like self
        """We parsed a new version of our file. We are the old version.
        This library will remain loaded, but maybe there are new variants in the .tar.
        Variants are loaded with a fresh access to the .tar, so we just need to parse
        the new ini here and update our internal representation.

        newLib contains the newly parsed ini data. We only want the new metadata from the ini.
        newLib also generated new meta-instruments (without any samples loaded) that will just get discarded.
        """
        assert self.tarFilePath == newLib.tarFilePath or newLib.tarFilePath.samefile(self.tarFilePath), (self.tarFilePath, newLib.tarFilePath)

        #The library version is a flat integer counter (see exportMetadata), so compare numerically.
        if int(newLib.config["library"]["version"]) > int(self.config["library"]["version"]):
            self.config = newLib.config
            for newInstrId, newInstrument in newLib.instruments.items():
                if newInstrId in self.instruments:
                    assert newInstrument.defaultVariant == self.instruments[newInstrId].defaultVariant, (newInstrument.defaultVariant, self.instruments[newInstrId].defaultVariant) #this is by Tembro-Design. Never change existing instruments.
                    for newVariant in newInstrument.variants: #string
                        if not newVariant in self.instruments[newInstrId].variants:
                            logger.info(f"Found a new variant {newVariant} while parsing updated version of library {newLib.tarFilePath} and instrument {newInstrument.metadata['name']}")
                            self.instruments[newInstrId].variants.append(newVariant)
                else:
                    logger.info(f"Found a new instrument {newInstrument.metadata['name']} while parsing updated version of library {newLib.tarFilePath}")
                    self.instruments[newInstrId] = newInstrument

            #Update various values
            #print ("Old count, new count", self.instrumentsInLibraryCount, newLib.instrumentsInLibraryCount)
            self.instrumentsInLibraryCount = newLib.instrumentsInLibraryCount
            for instr in self.instruments.values():
                instr.instrumentsInLibraryCount = newLib.instrumentsInLibraryCount

        elif int(newLib.config["library"]["version"]) < int(self.config["library"]["version"]):
            raise ValueError(f"""Attempted to 'update' library {self.tarFilePath} version {self.config["library"]["version"]} with older version {newLib.config["library"]["version"]}. This is not allowed in Tembro during runtime. Aborting program.""")
        else:
            #Otherwise this is literally the same file with the same id and same version. -> no action required.
            pass

    def transferOldState(self, oldLib): #oldLib is of type Library, like self
        """We are a newly parsed Library that will replace an existing one.
        The new and old library ID are the same, but the filepath is different.
        The old one maybe has loaded instruments. We will load these instruments.

        oldLib contains the state up until now.

        This happens when the sample dir is changed during runtime with the same, or updated, files.
        In contrast to updateWithNewParse we don't need to worry about incrementally getting
        new instruments and variants. We just load the whole lib and then pick runtime data
        from the old one, before discarding it.
        """
        for instrId, oldInstrument in oldLib.instruments.items():
            #Use another instrument instance to copy the soft values.
            #Not really sampler-internal CC but at least everything we control on our own.
            ourNewInstrument = self.instruments[instrId]
            assert oldInstrument.idKey == ourNewInstrument.idKey, (oldInstrument.idKey, ourNewInstrument.idKey)
            if oldInstrument.enabled:
                #We need to deactivate the old lib before activating the new one because we have the
                #same ids and names, which results in the same jack ports.
                tmpCurVar = oldInstrument.currentVariant
                tmpKeySw = oldInstrument.currentKeySwitch
                tmpMix = oldInstrument.mixerLevel
                oldInstrument.disable() #frees jack port names
                ourNewInstrument.enable()
                ourNewInstrument.chooseVariant(tmpCurVar)
                if tmpKeySw:
                    ourNewInstrument.setKeySwitch(tmpKeySw)
                ourNewInstrument.mixerLevel = tmpMix
            elif ourNewInstrument.enabled and not oldInstrument.enabled:
                ourNewInstrument.disable()

    def exportMetadata(self)->dict:
        """Return a dictionary where each key is an instrument id, plus a special key
        "library" with our own metadata. Allows the callbacks receiver to construct a hierarchy."""
        result = {}
        libDict = {}
        result["library"] = libDict

        #Explicit is better than implicit
        assert int(self.config["library"]["id"]) == self.id, (int(self.config["library"]["id"]), self.id)
        libDict["tarFilePath"] = self.tarFilePath
        libDict["id"] = int(self.config["library"]["id"])
        libDict["name"] = self.config["library"]["name"]
        libDict["description"] = self.config["library"]["description"]
        libDict["license"] = self.config["library"]["license"]
        libDict["vendor"] = self.config["library"]["vendor"]
        libDict["version"] = self.config["library"]["version"] #this is not the upstream sfz version of the sample creator but our own library index. Flat integers as counters. Higher is newer.

        for instrument in self.instruments.values():
            result[instrument.id] = instrument.exportMetadata() #another dict
        return result

    #Save
    def serialize(self)->dict:
        """The library-obj is already constructed from static default values.
        We only save what differs from the default, most importantly the state of the instruments.
        If a library gets a new instrument in between tembro runs it will just use default values."""
        return {
            "id" : self.id, #for convenience access
            "instruments" : [instr.serialize() for instr in self.instruments.values()],
        }