diff --git a/commune/cli.py b/commune/cli.py
index 90120eb14..68058b49d 100644
--- a/commune/cli.py
+++ b/commune/cli.py
@@ -48,6 +48,8 @@ def determine_type(x):
         except ValueError:
             pass
     return x
+
+
 def forward(argv = None,
             sep = '--',
             fn_splitters = [':', '/', '//', '::'],
@@ -72,6 +74,7 @@ def forward(argv = None,
             argv.remove(arg)
             init_kwargs[key] = determine_type(value)
     # any of the --flags are init kwargs
+
     fn = argv.pop(0).replace('-', '_')
     module = c.module(base)
     fs = [fs for fs in fn_splitters if fs in fn]
@@ -132,5 +135,8 @@ def forward(argv = None,
     else:
         c.print(output)
     return output
+
+
 def main():
-    forward()
\ No newline at end of file
+    forward()
+
\ No newline at end of file
diff --git a/commune/client.py b/commune/client.py
index 8313b2229..c55db958f 100644
--- a/commune/client.py
+++ b/commune/client.py
@@ -105,7 +105,6 @@ def request(self, url: str,
                 stream: bool = True):
         try:
             response = self.session.post(url, json=data, headers=headers, timeout=timeout, stream=stream)
-
             if 'text/event-stream' in response.headers.get('Content-Type', ''):
                 return self.stream(response)
             if 'application/json' in response.headers.get('Content-Type', ''):
@@ -132,7 +131,6 @@ def get_data(self, args=[], kwargs={}, **extra_kwargs):
        kwargs = {**kwargs, **extra_kwargs}
        data =  { "args": args, "kwargs": kwargs}
        data = self.serializer.serialize(data)
-
        return data

    def forward(self,
@@ -155,8 +153,12 @@ def forward(self,
                   'crypto_type': str(key.crypto_type),
                   'time': str(c.time())
                   }
+
        headers['signature'] = key.sign({'data': headers['hash'], 'time': headers['time']}).hex()
-        return self.request(url=url, data=data,headers=headers, timeout=timeout)
+        return self.request(url=url,
+                            data=data,
+                            headers=headers,
+                            timeout=timeout)

    def __del__(self):
        try:
@@ -223,4 +225,79 @@ def __getattr__(self, key):
            return getattr(self, key)
        else:
            return lambda *args, **kwargs : self.remote_call(*args, remote_fn=key, **kwargs)
-
\ No newline at end of file
+
+
+    def forcurl(self,
+                fn: str = 'info',
+                args: list = None,
+                kwargs: dict = None,
+                timeout: int = 2,
+                key: str = None,
+                **extra_kwargs) -> str:
+        """
+        Generate a cURL command for the equivalent HTTP request
+
+        Args:
+            fn (str): Function name to call
+            args (list): Arguments list
+            kwargs (dict): Keyword arguments
+            timeout (int): Request timeout in seconds
+            key (str): Key for authentication
+            **extra_kwargs: Additional keyword arguments
+
+        Returns:
+            str: cURL command string
+        """
+        # Resolve the key and URL
+        key = self.resolve_key(key)
+        url = self.get_url(fn=fn)
+
+        # Prepare the data
+        data = self.get_data(args=args or [], kwargs=kwargs or {}, **extra_kwargs)
+
+        # Prepare headers
+        headers = {
+            'Content-Type': 'application/json',
+            'key': key.ss58_address,
+            'hash': c.hash(data),
+            'crypto_type': str(key.crypto_type),
+            'time': str(c.time())
+        }
+
+        # Add signature
+        headers['signature'] = key.sign({
+            'data': headers['hash'],
+            'time': headers['time']
+        }).hex()
+
+        # Build curl command
+        curl_cmd = ['curl']
+
+        # Add method
+        curl_cmd.append('-X POST')
+
+        # Add headers
+        for header_name, header_value in headers.items():
+            curl_cmd.append(f"-H '{header_name}: {header_value}'")
+
+        # Add data
+        if isinstance(data, str):
+            data_str = data
+        else:
+            data_str = json.dumps(data)
+        curl_cmd.append(f"-d '{data_str}'")
+
+        # Add URL
+        curl_cmd.append(f"'{url}'")
+
+        # Add timeout
+        curl_cmd.append(f'--max-time {timeout}')
+
+        # now get the dict of the response and return it
+        # make the request in the os and return the response
+        import os
+        response = os.popen(' '.join(curl_cmd)).read()
+
+
+        return response
+
\ No newline at end of file
diff --git a/commune/key.py b/commune/key.py
index b6589d928..43e4c06cf 100644
--- a/commune/key.py
+++ b/commune/key.py
@@ -34,6 +34,8 @@ class KeyType:
 KeyType.crypto_types = [k for k in KeyType.__dict__.keys() if not k.startswith('_')]

 KeyType.crypto_type_map = {k.lower():v for k,v in KeyType.__dict__.items() if k in KeyType.crypto_types }
+KeyType.crypto_types = list(KeyType.crypto_type_map.keys())
+
 class MnemonicLanguageCode:
     ENGLISH = 'en'
     CHINESE_SIMPLIFIED = 'zh-hans'
@@ -47,10 +49,12 @@ class MnemonicLanguageCode:
 class Key(c.Module):
     crypto_types = KeyType.crypto_types
     crypto_type_map = KeyType.crypto_type_map
+    crypto_types = list(crypto_type_map.keys())
     ss58_format = 42
     crypto_type = 'sr25519'

-    def __init__(self,private_key: Union[bytes, str] = None,
+    def __init__(self,
+                 private_key: Union[bytes, str] = None,
                  ss58_format: int = ss58_format,
                  crypto_type: int = crypto_type,
                  derive_path: str = None,
@@ -62,16 +66,25 @@ def __init__(self,private_key: Union[bytes, str] = None,
                              derive_path=derive_path,
                              path=path,
                              **kwargs)
+
+    @property
+    def short_address(self):
+        n = 4
+        return self.ss58_address[:n] + '...' + self.ss58_address[-n:]
+
     def set_crypto_type(self, crypto_type):
         crypto_type = self.resolve_crypto_type(crypto_type)
         if crypto_type != self.crypto_type:
-            return self.set_private_key(private_key=self.private_key,
-                                        crypto_type=crypto_type,
-                                        ss58_format=self.ss58_format,
-                                        derive_path=self.derive_path,
-                                        path=self.path)
+            kwargs = {
+                'private_key': self.private_key,
+                'ss58_format': self.ss58_format,
+                'derive_path': self.derive_path,
+                'path': self.path,
+                'crypto_type': crypto_type # update crypto_type
+            }
+            return self.set_private_key(**kwargs)
         else:
-            raise ValueError(f'crypto_type {crypto_type} is already set')
+            return {'success': False, 'message': f'crypto_type already set to {crypto_type}'}

     def set_private_key(self,
                  private_key: Union[bytes, str] = None,
@@ -108,7 +121,7 @@ def set_private_key(self,
             key_address = ss58_encode(public_key, ss58_format=ss58_format)
             hash_type = 'ss58'
         elif crypto_type == KeyType.ED25519:
-            private_key = private_key[0:32]
+            private_key = private_key[:32] if len(private_key) == 64 else private_key
             public_key, private_key = ed25519_zebra.ed_from_seed(private_key)
             key_address = ss58_encode(public_key, ss58_format=ss58_format)
             hash_type = 'ss58'
@@ -132,6 +145,8 @@ def set_private_key(self,
         self.path = path
         self.ss58_format = ss58_format
         self.key_address = self.ss58_address
+        self.key_type = self.crypto_type2name(self.crypto_type)
+        return {'key_address':key_address, 'crypto_type':crypto_type}

     @classmethod
     def add_key(cls, path:str, mnemonic:str = None, password:str=None, refresh:bool=False, private_key=None, **kwargs):
@@ -149,11 +164,8 @@ def add_key(cls, path:str, mnemonic:str = None, password:str=None, refresh:bool=
         return json.loads(key_json)

     @classmethod
-    def rename_key(self, new_path):
-        return self.mv_key(self.path, new_path)
-
-    def ticket(self, data=None, **kwargs):
-        return self.sign({'data':data, 'time': c.time()} , to_json=True, **kwargs)
+    def ticket(cls , data=None, key=None, **kwargs):
+        return cls.get_key(key).sign({'data':data, 'time': c.time()} , to_json=True, **kwargs)

     @classmethod
     def mv_key(cls, path, new_path):
@@ -165,8 +177,6 @@ def mv_key(cls, path, new_path):
         new_key = cls.get_key(new_path)
         return {'success': True, 'from': path , 'to': new_path, 'key': new_key}

-    rename_key = mv_key
-
     @classmethod
     def copy_key(cls, path, 
new_path): assert cls.key_exists(path), f'key does not exist at {path}' @@ -188,6 +198,16 @@ def add_keys(cls, name, n=100, verbose:bool = False, **kwargs): return response + def key2encrypted(self): + keys = self.keys() + key2encrypted = {} + for k in keys: + key2encrypted[k] = self.is_key_encrypted(k) + return key2encrypted + + def encrypted_keys(self): + return [k for k,v in self.key2encrypted().items() if v == True] + @classmethod def key_info(cls, path='module', **kwargs): return cls.get_key_json(path) @@ -239,7 +259,11 @@ def key2mnemonic(cls, search=None) -> dict[str, str]: return mems @classmethod - def get_key(cls, path:str,password:str=None, create_if_not_exists:bool = True, crypto_type=crypto_type, **kwargs): + def get_key(cls, + path:str,password:str=None, + create_if_not_exists:bool = True, + crypto_type=crypto_type, + **kwargs): for k in ['crypto_type', 'key_type', 'type']: if k in kwargs: crypto_type = kwargs.pop(k) @@ -361,24 +385,13 @@ def rm_key(cls, key=None): raise Exception(f'key {key} not found, available keys: {keys}') c.rm(key2path[key]) return {'deleted':[key]} - - @property - def crypto_type_name(self): - return self.crypto_type2name(self.crypto_type).lower() - @property - def key_type(self): - return self.crypto_type2name(self.crypto_type).lower() @classmethod def crypto_name2type(cls, name:str): crypto_type_map = cls.crypto_type_map name = name.lower() - for k,v in crypto_type_map.items(): - if k.startswith(name.lower()): - return v - name = name.lower() if not name in crypto_type_map: raise ValueError(f'crypto_type {name} not supported {crypto_type_map}') return crypto_type_map[name] @@ -387,6 +400,10 @@ def crypto_name2type(cls, name:str): def crypto_type2name(cls, crypto_type:str): crypto_type_map ={v:k for k,v in cls.crypto_type_map.items()} return crypto_type_map[crypto_type] + + @classmethod + def resolve_crypto_type_name(cls, crypto_type): + return cls.crypto_type2name(cls.resolve_crypto_type(crypto_type)) @classmethod def resolve_crypto_type(cls, crypto_type): @@ -401,6 +418,10 @@ def resolve_crypto_type(cls, crypto_type): crypto_type = cls.crypto_name2type(crypto_type) return int(crypto_type) + @classmethod + def new_private_key(cls, crypto_type='ecdsa'): + return cls.new_key(crypto_type=crypto_type).private_key.hex() + @classmethod def new_key(cls, mnemonic:str = None, @@ -765,10 +786,7 @@ def export_to_encrypted_json(self, passphrase: str, name: str = None) -> dict: seperator = "::signature=" - def sign(self, - data: Union[ScaleBytes, bytes, str], - to_json = False, - ) -> bytes: + def sign(self, data: Union[ScaleBytes, bytes, str], to_json = False) -> bytes: """ Creates a signature for given data Parameters @@ -787,19 +805,14 @@ def sign(self, data = bytes.fromhex(data[2:]) elif type(data) is str: data = data.encode() - if not self.private_key: raise ConfigurationError('No private key set to create signatures') - if self.crypto_type == KeyType.SR25519: signature = sr25519.sign((self.public_key, self.private_key), data) - elif self.crypto_type == KeyType.ED25519: signature = ed25519_zebra.ed_sign(self.private_key, data) - elif self.crypto_type == KeyType.ECDSA: signature = ecdsa_sign(self.private_key, data) - else: raise ConfigurationError("Crypto type not supported") @@ -899,13 +912,12 @@ def verify(self, return ss58_encode(public_key, ss58_format=ss58_format) return verified - def resolve_encryption_password(self, password:str): + def resolve_encryption_password(self, password:str=None) -> str: if password == None: password = self.private_key if 
isinstance(password, str): password = password.encode() - password = hashlib.sha256(password).digest() - return password + return hashlib.sha256(password).digest() def resolve_encryption_data(self, data): if not isinstance(data, str): @@ -989,25 +1001,31 @@ def decrypt_message(self, encrypted_message_with_nonce: bytes, sender_public_key @classmethod def encrypt_key(cls, path = 'test.enc', password=None): assert cls.key_exists(path), f'file {path} does not exist' - password = password or c.hash(cls.generate_mnemonic()) + assert not cls.is_key_encrypted(path), f'{path} already encrypted' data = cls.get(path) enc_text = {'data': c.encrypt(data, password=password), 'encrypted': True} cls.put(path, enc_text) - return {'encrypted':enc_text, 'path':path } + return {'number_of_characters_encrypted':len(enc_text), 'path':path } @classmethod - def is_key_encrypted(cls, path, data=None): - data = data or cls.get(path) + def is_key_encrypted(cls, key, data=None): + data = data or cls.get(key) return cls.is_encrypted(data) @classmethod - def decrypt_key(cls, path = 'test.enc', password=None): + def decrypt_key(cls, path = 'test.enc', password=None, key=None): + assert cls.key_exists(path), f'file {path} does not exist' + assert cls.is_key_encrypted(path), f'{path} not encrypted' data = cls.get(path) assert cls.is_encrypted(data), f'{path} not encrypted' - enc_text = c.decrypt(data['data'], password=password) - cls.put(path, enc_text) - return {'encrypted':enc_text, 'path':path , 'password':password} + dec_text = c.decrypt(data['data'], password=password) + cls.put(path, dec_text) + assert not cls.is_key_encrypted(path), f'failed to decrypt {path}' + loaded_key = c.get_key(path) + return { 'path':path , + 'key_address': loaded_key.ss58_address, + 'crypto_type': loaded_key.crypto_type} @classmethod def get_mnemonic(cls, key): @@ -1118,7 +1136,20 @@ def valid_h160_address(cls, address): return False return True + + def storage_migration(self): + key2path = self.key2path() + new_key2path = {} + for k_name, k_path in key2path.items(): + try: + key = c.get_key(k_name) + new_k_path = '/'.join(k_path.split('/')[:-1]) + '/' + f'{k_name}_address={key.ss58_address}_type={key.crypto_type}.json' + new_key2path[k_name] = new_k_path + except Exception as e: + c.print(f'failed to migrate {k_name} due to {e}', color='red') + + return new_key2path # if __name__ == "__main__": # Key.run() diff --git a/commune/module.py b/commune/module.py index b62a8b7ad..b7d4dd0ca 100755 --- a/commune/module.py +++ b/commune/module.py @@ -10,34 +10,28 @@ from copy import deepcopy from typing import * import nest_asyncio -import asyncio nest_asyncio.apply() class c: - splitters = [':', '/', '.'] + libname = lib = __file__.split('/')[-2]# the name of the library endpoints = ['ask', 'generate', 'forward'] core_features = ['module_name', 'module_class', 'filepath', 'dirpath', 'tree'] - lib_name = libname = lib = __file__.split('/')[-3]# the name of the library organization = org = orgname = 'commune-ai' # the organization - git_host = 'https://github.com' cost = 1 description = """This is a module""" base_module = 'module' # the base module + git_host = 'https://github.com' giturl = f'{git_host}/{org}/{libname}.git' # tge gutg - root_module_class = 'c' # WE REPLACE THIS THIS Module at the end, kindof odd, i know, ill fix it fam, chill out dawg, i didnt sleep with your girl default_port_range = [50050, 50150] # the port range between 50050 and 50150 default_ip = local_ip = loopback = '0.0.0.0' - src_path = source_path = rootpath = root_path 
= root = '/'.join(__file__.split('/')[:-1]) - home_path = homepath = os.path.expanduser('~') # the home path - lib_path = libpath = os.path.dirname(root_path) # the path to the library - repo_path = repopath = os.path.dirname(root_path) # the path to the repo - modules_path = os.path.dirname(__file__) + '/modules' - docs_path = libname + '/docs' + rootpath = root_path = root = '/'.join(__file__.split('/')[:-1]) + homepath = home_path = os.path.expanduser('~') # the home path + libpath = lib_path = os.path.dirname(root_path) # the path to the library + repopath = repo_path = os.path.dirname(root_path) # the path to the repo + modulespath = modules_path = os.path.dirname(__file__) + '/modules' + docspath = docs_path = libname + '/docs' + storagepath = storage_path = os.path.expanduser(f'~/.{libname}') cache = {} # cache for module objects - home = os.path.expanduser('~') # the home directory - __ss58_format__ = 42 # the ss58 format for the substrate address - storage_path = os.path.expanduser(f'~/.{libname}') - default_tag = 'base' shortcuts = { 'openai' : 'model.openai', 'openrouter': 'model.openrouter', @@ -49,61 +43,56 @@ class c: 'local': 'network', 'network.local': 'network', } + splitters = [':', '/', '.'] + @classmethod def module(cls, - path:str = 'module', + path:str = 'module', + shortcuts : dict = None, cache=True, trials=1, - **_kwargs ) -> str: - if path == None: - path = 'module' + tree:dict=None, + **extra_kwargs ) -> str: + path = path or 'module' if path.endswith('.py'): path = c.path2name(path) + else: + path = path.replace('/','.') + og_path = path - path = path or 'module' - t0 = time.time() - og_path = path + if path in c.module_cache and cache: return c.module_cache[path] - - if path in ['module', 'c']: + if path in ['module', c.libname[0]]: return c - - tree = c.tree() - path = c.shortcuts.get(path, path) + tree = tree or c.tree() path = tree.get(path, path) + shortcuts = shortcuts or c.shortcuts + path = shortcuts.get(path, path) try: module = c.import_object(path) except Exception as e: if trials == 0: raise ValueError(f'Error in module {og_path} {e}') - return c.module(path, - cache=cache, - tree=tree, - trials=trials-1) - latency = c.round(time.time() - t0, 3) + return c.module(path,cache=cache, tree=c.tree(max_age=10), trials=trials-1) if not hasattr(module, 'module_name'): - module.module_name = module.name = lambda *args, **kwargs : c.module_name(module) module.module_class = lambda *args, **kwargs : c.module_class(module) + module.module_name = module.name = lambda *args, **kwargs : c.module_name(module) + module.key = c.get_key(module.module_name(), create_if_not_exists=True) module.resolve_object = lambda *args, **kwargs : c.resolve_object(module) module.filepath = lambda *args, **kwargs : c.filepath(module) module.dirpath = lambda *args, **kwargs : c.dirpath(module) module.code = lambda *args, **kwargs : c.code(module) module.schema = lambda *args, **kwargs : c.schema(module) - module.functions = module.fns = lambda *args, **kwargs : c.get_functions(module) - module.params = lambda *args, **kwargs : c.params(module) - module.key = c.get_key(module.module_name(), create_if_not_exists=True) + module.fns = module.functions = lambda *args, **kwargs : c.get_functions(module) module.fn2code = lambda *args, **kwargs : c.fn2code(module) - module.help = lambda *args, **kwargs : c.help(*args, module=module, **kwargs) + module.ask = lambda *args, **kwargs : c.ask(*args, module=module, **kwargs) + module.config = lambda *args, **kwargs : c.config(module=module, 
**kwargs) if cache: c.module_cache[path] = module return module - - get_module = module + block = get_block = get_module = module - def __init__(self, *args, **kwargs): - pass - @classmethod def filepath(cls, obj=None) -> str: obj = cls.resolve_object(obj) @@ -113,47 +102,37 @@ def filepath(cls, obj=None) -> str: c.print(f'Error: {e} {cls}', color='red') module_path = inspect.getfile(cls) return module_path - - file_path = filepath + @classmethod def dirpath(cls, obj=None) -> str: return os.path.dirname(cls.filepath(obj)) + dir_path = dirpath @classmethod def module_name(cls, obj=None): obj = obj or cls module_file = inspect.getfile(obj) return c.path2name(module_file) - + path = name = module_name + def vs(self, path = None): path = path or c.libpath path = c.abspath(path) return c.cmd(f'code {path}') - - @classmethod - def get_module_name(cls, obj=None): - obj = cls.resolve_object(obj) - if hasattr(obj, 'module_name'): - return obj.module_name - else: - return cls.__name__ - - path = name = module_name @classmethod - def module_class(cls) -> str: - return cls.__name__ - + def module_class(cls, obj=None) -> str: + return (obj or cls).__name__ + @classmethod def class_name(cls, obj= None) -> str: obj = obj if obj != None else cls return obj.__name__ - classname = class_name - @classmethod - def config_path(cls) -> str: - return cls.filepath()[:-3] + '.yaml' + def config_path(cls, obj = None) -> str: + obj = obj or cls + return obj.filepath()[:-3] + '.yaml' @classmethod def sandbox(cls, path='./', filename='sandbox.py'): @@ -167,7 +146,6 @@ def sandbox(cls, path='./', filename='sandbox.py'): module_cache = {} _obj = None - def sync(self): return {'tree': c.tree(update=1), 'namespace':c.namespace(update=1), 'ip': c.ip()} @@ -181,29 +159,6 @@ def storage_dir(cls): @classmethod def __str__(cls): return cls.__name__ - - @classmethod - def root_address(cls, name:str='module', - network : str = 'local', - timeout:int = 100, - sleep_interval:int = 1, - **kwargs): - """ - Root module - """ - try: - if not c.server_exists(name, network=network): - c.serve(name, network=network, wait_for_server=True, **kwargs) - address = c.call('module/address', network=network, timeout=timeout) - ip = c.ip() - address = ip+':'+address.split(':')[-1] - except Exception as e: - c.print(f'Error: {e}', color='red') - address = None - return address - - addy = root_address - @classmethod def is_module(cls, obj=None) -> bool: @@ -215,10 +170,8 @@ def is_module(cls, obj=None) -> bool: @classmethod def is_root(cls, obj=None) -> bool: - required_features = c.core_features obj = obj or cls - return bool(c.is_module(obj) and obj.module_class() == cls.root_module_class) - + return bool(c.is_module(obj) and obj.module_class() == c.module_class()) def print( *text:str, **kwargs): if len(text) == 0: @@ -237,16 +190,18 @@ def is_error( *text:str, **kwargs): @classmethod def resolve_object(cls, obj:str = None, **kwargs): - if isinstance(obj, str): - if c.object_exists(obj): - return c.obj(obj) - if c.module_exists(obj): - return c.module(obj) if obj == None: - if cls._obj != None: - return cls._obj - else: - obj = cls + obj = cls._obj if cls._obj else cls + elif isinstance(obj, str): + if c.object_exists(obj): + obj = c.obj(obj) + elif c.module_exists(obj): + obj = c.module(obj) + elif c.is_fn(obj): + obj = c.get_fn(obj) + + assert obj != None, f'Object {obj} does not exist' + return obj @classmethod @@ -389,8 +344,11 @@ def get_key(cls,key:str = None , **kwargs) -> None: key = get_key @classmethod - def files(cls, path='./', 
search:str = None, - avoid_terms = ['__pycache__', '.git', '.ipynb_checkpoints', 'node_modules', 'artifacts'], **kwargs) -> List[str]: + def files(cls, + path='./', + search:str = None, + avoid_terms = ['__pycache__', '.git', '.ipynb_checkpoints', 'node_modules', 'artifacts', 'egg-info'], + **kwargs) -> List[str]: files =c.glob(path, **kwargs) files = [f for f in files if not any([at in f for at in avoid_terms])] if search != None: @@ -474,7 +432,14 @@ def init_module(self,*args, **kwargs): @classmethod def utils(cls, search=None): - utils = c.find_functions(c.root_path + '/utils') + utils = c.find_functions(c.rootpath + '/utils') + if search != None: + utils = [u for u in utils if search in u] + return sorted(utils) + + @classmethod + def get_utils(cls, search=None): + utils = c.find_functions(c.rootpath + '/utils') if search != None: utils = [u for u in utils if search in u] return sorted(utils) @@ -539,19 +504,13 @@ def get_routes(cls): routes = getattr(cls, 'routes') if callable(routes): routes = routes() - - def add_utils(): - utils = c.utils() - for util in utils: - k = '.'.join(util.split('.')[:-1]) - v = util.split('.')[-1] - routes[k] = routes.get(k , []) - routes[k].append(v) - return routes - - add_utils() + for util in c.utils(): + k = '.'.join(util.split('.')[:-1]) + v = util.split('.')[-1] + routes[k] = routes.get(k , []) + routes[k].append(v) return routes - + @classmethod def fn2route(cls): routes = cls.get_routes() @@ -584,7 +543,7 @@ def fn(*args, **kwargs): except: module = '.'.join(route.split('.')[:-1]) fn = route.split('.')[-1] - module = c.get_module(module) + module = c.module(module) fn_obj = getattr(module, fn) if c.classify_fn(fn_obj) == 'self': fn_obj = getattr(module(), fn) @@ -694,39 +653,18 @@ def config_exists(self, path:str=None) -> bool: return self.path_exists(path) @classmethod - def config(cls) -> 'Munch': + def config(cls, module=None, to_munch=True) -> 'Munch': ''' Returns the config ''' - config = cls.load_config() - if not config: - if hasattr(cls, 'init_kwargs'): - config = cls.init_kwargs() # from _schema.py - else: - config = {} - return config - - @classmethod - def load_config(cls, path:str=None, - default=None, - to_munch:bool = True - ) -> Union['Munch', Dict]: - ''' - Args: - path: The path to the config file - to_munch: If true, then convert the config to a munch - ''' - - default = default or {} - path = path if path else cls.config_path() - + module = module or cls + path = module.config_path() if os.path.exists(path): - config = cls.load_yaml(path) + config = c.load_yaml(path) else: - config = default - config = config or {} + config = c.init_kwargs(module) if to_munch: - config = cls.dict2munch(config) + config = c.dict2munch(config) return config @classmethod @@ -759,10 +697,6 @@ def has_config(cls) -> bool: except: return False - @classmethod - def config_path(cls) -> str: - return os.path.abspath('./config.yaml') - def update_config(self, config): self.config.update(config) return self.config @@ -784,14 +718,32 @@ def put_json(cls, data = json.dumps(data) cls.put_text(path, data) return path - save_json = put_json @classmethod - def rm(cls, path,possible_extensions = ['json'], avoid_paths = ['~', '/']): - path = cls.resolve_path(path=path) - avoid_paths = [cls.resolve_path(p) for p in avoid_paths] + def map(cls, x, fn): + if isinstance(x, dict): + return {k:fn(v) for k,v in x.items()} + elif isinstance(x, list): + return [fn(v) for v in x] + else: + raise ValueError(f'Cannot map {x}') + + def test_map(self): + x = {'a':1, 'b':2} + fn 
= lambda x: x+1 + assert self.map(x, fn) == {'a':2, 'b':3} + x = [1,2,3] + assert self.map(x, fn) == [2,3,4] + return {'success':True, 'message':'map test passed'} + + avoid_paths = ['~', '/', './', storage_path] + @classmethod + def rm(cls, path,possible_extensions = ['json'], avoid_paths = avoid_paths): + avoid_paths.append(c.storage_path) + path = cls.resolve_path(path) + avoid_paths = [cls.resolve_path(p) for p in avoid_paths] assert path not in avoid_paths, f'Cannot remove {path}' if not os.path.exists(path): for pe in possible_extensions: @@ -805,7 +757,6 @@ def rm(cls, path,possible_extensions = ['json'], avoid_paths = ['~', '/']): if os.path.isfile(path): os.remove(path) assert not os.path.exists(path), f'{path} was not removed' - return {'success':True, 'message':f'{path} removed'} @classmethod @@ -820,27 +771,18 @@ def glob(cls, path =None, files_only:bool = True, recursive:bool=True): return paths @classmethod - def get_json(cls, - path:str, - default:Any=None, - **kwargs): - path = cls.resolve_path(path=path, extension='json') + def get_json(cls, path:str,default:Any=None, **kwargs): + path = cls.resolve_path(path) + if not os.path.exists(path): + if not path.endswith('.json'): + path = path + '.json' try: - data = cls.get_text(path, **kwargs) + with open(path, 'r') as file: + data = json.load(file) except Exception as e: - return default - if isinstance(data, str): - try: - data = json.loads(data) - except Exception as e: - return default - if isinstance(data, dict): - if 'data' in data and 'meta' in data: - data = data['data'] + data = default return data - @classmethod - async def async_get_json(cls,*args, **kwargs): - return cls.get_json(*args, **kwargs) + load_json = get_json @classmethod @@ -951,14 +893,14 @@ def put(cls, if not c.jsonable(v): v = c.serialize(v) - data = {'data': v, 'encrypted': encrypt, 'timestamp': cls.timestamp()} + data = {'data': v, 'encrypted': encrypt, 'timestamp': time.time()} # default json getattr(cls,f'put_{mode}')(k, data) data_size = cls.sizeof(v) - return {'k': k, 'data_size': data_size, 'encrypted': encrypt, 'timestamp': cls.timestamp()} + return {'k': k, 'data_size': data_size, 'encrypted': encrypt, 'timestamp': time.time()} @classmethod def get(cls, @@ -966,8 +908,7 @@ def get(cls, default: Any=None, mode:str = 'json', max_age:str = None, - cache :bool = False, - full :bool = False, + full :bool = False, update :bool = False, password : str = None, verbose = False, @@ -975,14 +916,9 @@ def get(cls, ''' Puts a value in sthe config, with the option to encrypt it - Return the value ''' - if cache: - if k in cls.cache: - return cls.cache[k] data = getattr(cls, f'get_{mode}')(k,default=default, **kwargs) - if password != None: assert data['encrypted'] , f'{k} is not encrypted' @@ -990,27 +926,25 @@ def get(cls, data = data or default - if isinstance(data, dict): - if update: - max_age = 0 - if max_age != None: - timestamp = data.get('timestamp', None) - if timestamp != None: - age = int(time.time() - timestamp) - if age > max_age: # if the age is greater than the max age - c.print(f'{k} is too old ({age} > {max_age})', verbose=verbose) - return default - else: - data = default - + if not isinstance(data, dict): + return default + if update: + max_age = 0 + if max_age != None: + timestamp = 0 + for k in ['timestamp', 'time']: + if k in data: + timestamp = data[k] + break + age = int(time.time() - timestamp) + if age > max_age: # if the age is greater than the max age + c.print(f'{k} is too old ({age} > {max_age})', verbose=verbose) + return 
default + if not full: if isinstance(data, dict): if 'data' in data: data = data['data'] - - # local cache - if cache: - cls.cache[k] = data return data def get_age(self, k:str) -> int: @@ -1112,6 +1046,20 @@ def fn2code(cls, search=None, module=None)-> Dict[str, str]: print(f'Error: {e}') return fn_code_map + @classmethod + def getsource(cls, fn): + obj = None + if isinstance(fn, str): + if c.object_exists(fn): + obj = c.obj(fn) + elif c.module_exists(fn): + obj = c.module(fn) + + + assert obj != None, f'{fn} is not a function or object' + return inspect.getsource(obj) + + @classmethod def fn_code(cls,fn:str, **kwargs) -> str: ''' @@ -1160,50 +1108,24 @@ def get_parents(cls, obj = None,recursive=True, avoid_classes=['object']) -> Lis return parents @classmethod - def fn_schema(cls, fn:str, - defaults:bool=True, - docs:bool = True, **kwargs)->dict: + def schema(cls, fn:str = '__init__', **kwargs)->dict: ''' Get function schema of function in cls ''' - fn_schema = {} + schema = {} fn = cls.get_fn(fn) - input_schema = c.fn_signature(fn) - for k,v in input_schema.items(): - v = str(v) - if v.startswith(' 0 - def n_fns(self, search = None): return len(self.fns(search=search)) @@ -1413,20 +1315,19 @@ def is_fn(cls, fn, splitters = [':', '/', '.']): return callable(fn) @classmethod - def get_fn(cls, fn:str, init_kwargs = None, splitters=splitters): + def get_fn(cls, fn:str, splitters=[":", "/"]) -> 'Callable': """ Gets the function from a string or if its an attribute """ if isinstance(fn, str): - if c.object_exists(fn): - return c.obj(fn) - elif hasattr(cls, fn): + if hasattr(cls, fn): fn2route = cls.fn2route() if fn in fn2route: return c.obj(fn2route[fn]) - # step 3, if the function is routed return getattr(cls, fn) - + elif c.object_exists(fn): + return c.obj(fn) + for splitter in splitters: if splitter in fn: module_name= splitter.join(fn.split(splitter)[:-1]) @@ -1485,7 +1386,7 @@ def classify_fn(cls, fn): fn = cls.get_fn(fn) if not callable(fn): return 'cls' - args = cls.get_args(fn) + args = c.get_args(fn) if len(args) == 0: return 'property' if args[0] == 'self': @@ -1518,12 +1419,7 @@ def is_imported(package:str) : def is_parent(cls, obj=None): obj = obj or cls return bool(obj in cls.get_parents()) - - @classmethod - def find_code_lines(cls, search:str = None , module=None) -> List[str]: - module_code = cls.get_module(module).code() - return cls.find_lines(search=search, text=module_code) - + @classmethod def find_lines(self, text:str, search:str) -> List[str]: """ @@ -1814,6 +1710,10 @@ def import_module(cls, c.ensure_sys_path() return import_module(import_path) + @classmethod + def is_module(cls, path:str): + return os.path.isdir(path) or path.endswith('.py') + @classmethod def import_object(cls, key:str, **kwargs)-> Any: ''' Import an object from a string with the format of {module_path}.{object}''' @@ -1838,11 +1738,19 @@ def module_exists(cls, module:str, **kwargs) -> bool: ''' Returns true if the module exists ''' + try: module = c.shortcuts.get(module, module) return os.path.exists(c.name2path(module)) except Exception as e: - return False + module_exists = False + + try: + module_exists = bool(c.import_module(module)) + except Exception as e: + module_exists = False + + return module_exists @classmethod def has_app(cls, module:str, **kwargs) -> bool: @@ -1945,20 +1853,24 @@ def core_modules(cls, search=None, depth=10000, **kwargs): def get_modules(cls, search=None, **kwargs): return list(cls.tree(search=search, **kwargs).keys()) _modules = None + + def n(self, search=None): + 
return len(c.modules(search=search)) @classmethod def modules(cls, search=None, cache=True, - max_age=60, - update=False, **kwargs)-> List[str]: + max_age=60, + update=False, **extra_kwargs)-> List[str]: modules = cls._modules modules = cls.get('modules', max_age=max_age, update=update) if not cache or modules == None: - modules = cls.get_modules(search=None, **kwargs) + modules = cls.get_modules(search=None, **extra_kwargs) cls.put('modules', modules) if search != None: modules = [m for m in modules if search in m] return modules + blocks = modules @classmethod def has_module(cls, module, path=None): @@ -2043,11 +1955,6 @@ def get_util(cls, util:str, prefix='commune.utils'): def root_key(cls): return cls.get_key() - @classmethod - def root_key_address(cls) -> str: - return cls.root_key().ss58_address - - @staticmethod def round(x, sig=6, small_value=1.0e-9): import math @@ -2060,76 +1967,54 @@ def round(x, sig=6, small_value=1.0e-9): """ return round(x, sig - int(math.floor(math.log10(max(abs(x), abs(small_value))))) - 1) - @classmethod - def is_root_key(cls, address:str)-> str: - return address == cls.root_key().ss58_address - - @classmethod - def folder_structure(cls, path:str='./', search='py', max_depth:int=5, depth:int=0)-> dict: - import glob - files = cls.glob(path + '/**') - results = [] - for file in files: - if os.path.isdir(file): - cls.folder_structure(file, search=search, max_depth=max_depth, depth=depth+1) - else: - if search in file: - results.append(file) - return results - str2hash = hash - def set_api_key(self, api_key:str, cache:bool = True): - api_key = os.getenv(str(api_key), None) - if api_key == None: - api_key = self.get_api_key() - self.api_key = api_key - if cache: - self.add_api_key(api_key) - assert isinstance(api_key, str) - - def add_repo(self, repo:str, path:str=None, **kwargs): return c.cmd(f'git clone {repo} {path}', **kwargs) - def add_api_key(self, api_key:str): - assert isinstance(api_key, str) - path = self.resolve_path('api_keys') + def add_api_key(self, api_key:str, module=None): + path = self.get_api_keys_path(module) api_keys = self.get(path, []) api_keys.append(api_key) api_keys = list(set(api_keys)) self.put(path, api_keys) return {'api_keys': api_keys} - def set_api_keys(self, api_keys:str): + def set_api_keys(self, api_keys:str, module:str=None): + path = self.get_api_keys_path(module) api_keys = list(set(api_keys)) - return self.put('api_keys', api_keys) - - def rm_api_key(self, api_key:str): - assert isinstance(api_key, str) - api_keys = self.get(self.resolve_path('api_keys'), []) - for i in range(len(api_keys)): - if api_key == api_keys[i]: - api_keys.pop(i) - break - path = self.resolve_path('api_keys') return self.put(path, api_keys) + + + def rm_api_key(self, api_key:str, module:str=None): + module = module or self.module_name() + api_keys = self.api_keys(module=module) + n = len(api_keys) + if isinstance(api_key, int): + api_key = api_keys[api_key] + if api_key in api_keys: + api_keys.remove(api_key) + self.set_api_keys(api_keys, module=module) + else: + return {'error': f'api_key {api_key} not found'} + + assert len(self.api_keys(module)) == n - 1, f'Error removing api key {api_key}' + return {'api_keys': api_keys} def get_api_key(self, module=None): - if module != None: - self = c.module(module) - api_keys = self.api_keys() - if len(api_keys) == 0: - raise - else: - return self.choice(api_keys) + return c.choice(self.api_keys(module)) + + def get_api_keys_path(self, module:str=None): + module = module or self.module_name() + return 
c.resolve_path(f'api_keys/{module}') - def api_keys(self): - return self.get(self.resolve_path('api_keys'), []) + def api_keys(self, module=None): + path = self.get_api_keys_path(module) + return c.get(path, []) - def rm_api_keys(self): - self.put(self.resolve_path('api_keys'), []) - return {'api_keys': []} + def rm_api_keys(self, module=None): + path = self.get_api_keys_path(module) + return c.put(path, []) @classmethod def remote_fn(cls, @@ -2176,24 +2061,29 @@ def resolve_extension( filename:str, extension = '.py') -> str: return filename + extension @classmethod - def help(cls, *text, module=None, global_context=f'{rootpath}/docs', **kwargs): - if cls.module_name() == 'module': - return c.module('docs')().help(*text) + def help(cls, *text, module=None, **kwargs): text = ' '.join(map(str, text)) - if global_context != None: - text = text + str(c.file2text(global_context)) - module = module or cls.module_name() - context = c.code(module) - return c.ask(f'{context} {text} \n') - + code = c.code(module or cls.module_name()) + text = f'{code} {text}' + print('size of text', len(text)) + return c.ask(text, **kwargs) + def time(self): return time.time() + def repos(self): + return c.ls('~/') + + def clone(self, repo:str, path:str=None, **kwargs): + path = '~/' + repo if path == None else path + cmd = f'git clone {repo}' + + return c.cmd(f'git clone {repo} {path}', **kwargs) + def copy_module(self,module:str, path:str): code = c.code(module) path = os.path.abspath(path) import time - # put text one char at a time to the file # append the char to the code c.rm(path) @@ -2204,7 +2094,6 @@ def copy_module(self,module:str, path:str): with open(path, 'a') as f: f.write(char) return {'path': path, 'module': module} - def has_module(self, path:str): for path in c.files(path): @@ -2244,7 +2133,6 @@ def install(self, path ): def epoch(self, *args, **kwargs): return c.run_epoch(*args, **kwargs) - c.routes = { "vali": [ "run_epoch", @@ -2420,7 +2308,6 @@ def epoch(self, *args, **kwargs): "is_registered", "update_subnet", "my_subnets", - "my_netuids", "register_subnet", "registered_subnets", "registered_netuids" @@ -2437,3 +2324,4 @@ def epoch(self, *args, **kwargs): + diff --git a/commune/modules/agent/agent.py b/commune/modules/agent/agent.py deleted file mode 100644 index 0aae85762..000000000 --- a/commune/modules/agent/agent.py +++ /dev/null @@ -1,127 +0,0 @@ -import commune as c -import json -import os - -class Agent(c.Module): - anchor="OUTPUT" - def build(self, *args, path=c.docs_path): - text = self.args2text(args) - context = self.find_text(query=text, path=path) - prompt = f""" - {context} - AD START FINISH THE OUTPUT WITH THE ANCHOR TAGS - if you write a file so i can easily process it back - <{self.anchor}(path=filepath)> - you are totally fine using ./ if you are refering to the pwd for brevity - """ - output = '' - front_anchor = '" - back_anchor = f"" - context = c.modules() - prompt = f""" - QUERY - {query} - INSTRUCTION - get the top {n} files that match the query - instead of using the full {os.path.expanduser('~')}, use ~ - CONTEXT - {context} - OUTPUT - (JSON ONLY AND ONLY RESPOND WITH THE FOLLOWING INCLUDING THE ANCHORS SO WE CAN PARSE) - {front_anchor}{output_format}{back_anchor} - """ - output = '' - for ch in c.ask(prompt, model=model): - print(ch, end='') - output += ch - if ch == front_anchor: - break - if '```json' in output: - output = output.split('```json')[1].split('```')[0] - elif front_anchor in output: - output = output.split(front_anchor)[1].split(back_anchor)[0] - else: 
- output = output - output = json.loads(output) - assert len(output) > 0 - return output - - - def find_files(self, - query='', - output_format="DICT(data:list[str])" , - path='./', - n=5, - model='sonnet'): - front_anchor = f"<{self.anchor}>" - back_anchor = f"" - context = c.files(path) - prompt = f""" - QUERY - {query} - INSTRUCTION - get the top {n} files that match the query - instead of using the full {os.path.expanduser('~')}, use ~ - CONTEXT - {context} - OUTPUT - (JSON ONLY AND ONLY RESPOND WITH THE FOLLOWING INCLUDING THE ANCHORS SO WE CAN PARSE) - {front_anchor}{output_format}{back_anchor} - """ - output = '' - for ch in c.ask(prompt, model=model): - print(ch, end='') - output += ch - if ch == front_anchor: - break - if '```json' in output: - output = output.split('```json')[1].split('```')[0] - elif front_anchor in output: - output = output.split(front_anchor)[1].split(back_anchor)[0] - else: - output = output - output = json.loads(output)['data'] - assert len(output) > 0 - return output - - def batch_context(self, path='./', batch_size=20000): - - file2text = c.file2text(path) - file2size = {k:len(v) for k,v in file2text.items()} - current_size = 0 - batch_list = [] - files_batch = {} - for f, s in file2size.items(): - if (current_size + s) > batch_size: - batch_list += [files_batch] - files_batch = {} - current_size = 0 - current_size += s - files_batch[f] = c.get_text(path + f ) - return batch_list - - - diff --git a/commune/modules/chat/chat.py b/commune/modules/chat/chat.py deleted file mode 100644 index 5d9b1d085..000000000 --- a/commune/modules/chat/chat.py +++ /dev/null @@ -1,62 +0,0 @@ -import commune as c -import os - -class Chat(c.Module): - - def __init__(self, - max_tokens=420000, - prompt = 'The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly.', - model = None, - history_path='history', - **kwargs): - - self.max_tokens = max_tokens - self.prompt = prompt - self.model = c.module('model.openrouter')(model=model, **kwargs) - self.history_path = self.resolve_path(history_path) - - def generate(self, text = 'whats 2+2?' 
, model= 'anthropic/claude-3.5-sonnet', temperature= 0.5, max_tokens= 1000000,stream=True, ): - text = self.process_text(text) - return self.model.generate(text, stream=stream, model=model, max_tokens=max_tokens,temperature=temperature ) - - forward = generate - - def ask(self, *text, **kwargs): - text = ' '.join(list(map(str, text))) - return self.generate(text, **kwargs) - - def process_text(self, text): - new_text = '' - for word in text.split(' '): - if any([word.startswith(ch) for ch in ['.', '~', '/']]) and os.path.exists(word): - word = c.file2text(word) - print(word.keys()) - new_text += str(word) - return new_text - - - def summarize(self, path='./', max_chars=10000): - if c.module_exists(path): - c.print(f'Summarizing Module: {path}') - text = c.code(path) - elif os.path.isdir(path): - c.print(f'Summarizing DIRECTORY: {path}') - paths = c.ls(path) - for p in paths: - return self.summarize(p) - elif os.path.isfile(path): - c.print(f'Summarizing File: {path}') - text = c.file2text(path) - prompt = f''' - GOAL - summarize the following into tupples - CONTEXT - {text} - OUTPUT - ''' - return c.ask(prompt) - - - - def models(self): - return self.model.models() \ No newline at end of file diff --git a/commune/modules/py/api.py b/commune/modules/py/api.py deleted file mode 100644 index b40661deb..000000000 --- a/commune/modules/py/api.py +++ /dev/null @@ -1,19 +0,0 @@ - -import commune as c -py = c.module('py')() - -def verify(kwargs): - signature = kwargs['signature'] - assert c.verify(signature), 'Invalid signature.' - -class Api(c.Module): - - def create_env(self, env, **kwargs): - verify(kwargs) - '''Create a virtual environment.''' - return py.create_env(env) - - def remove_env(self, env, **kwargs): - '''Remove a virtual environment.''' - ticket = c.verify(**kwargs) - return py.remove_env(env) \ No newline at end of file diff --git a/commune/modules/sandbox.py b/commune/modules/sandbox.py deleted file mode 100644 index 510d0844a..000000000 --- a/commune/modules/sandbox.py +++ /dev/null @@ -1,3 +0,0 @@ -import commune as c - -c.print(c.get_key('subspace')) \ No newline at end of file diff --git a/commune/network/subspace/subspace.py b/commune/network/subspace/subspace.py index 6755a895d..ac256f888 100644 --- a/commune/network/subspace/subspace.py +++ b/commune/network/subspace/subspace.py @@ -53,7 +53,7 @@ def __init__( network=network, url: str = None, mode = 'wss', - num_connections: int = 5, + num_connections: int = 1, wait_for_finalization: bool = False, test = False, ws_options = {}, @@ -88,10 +88,8 @@ def switch(cls, network=None): code = code.replace(replace_str, new_str) c.put_text(filepath, code) cls.network = network - return {'current_network': network, 'past_network': og_network} - - switch_network = switch - + return {'network': network, 'og_network': og_network} + def set_network(self, network=None, mode = 'wss', @@ -122,6 +120,7 @@ def set_network(self, def set_connections(self, num_connections: int): self.connections_queue = queue.Queue(num_connections) self.num_connections = num_connections + print(f'Setting connections {num_connections}') try: for _ in range(self.num_connections): self.connections_queue.put(SubstrateInterface(self.url, ws_options=self.ws_options)) @@ -906,10 +905,9 @@ def compose_call( Raises: ChainTransactionError: If the transaction fails. 
""" - c.print(f'SUBSPACE({module}/{fn} network={self.network} url={self.url})') - c.print('PARAMS --> ',params) key = self.resolve_key(key) + c.print(f'Calling(module={module} fn={fn} network={self.network} key={key.key_address} params={params}', color='blue') if key is None and not unsigned: raise ValueError("Key must be provided for signed extrinsics.") @@ -1918,9 +1916,7 @@ def weights(self, subnet: int = 0, extract_value: bool = False ) -> dict[int, li ) return weights_dict - def addresses( - self, subnet: int = 0, extract_value: bool = False, max_age: int = 60, update: bool = False - ) -> dict[int, str]: + def addresses( self, subnet: int = 0, extract_value: bool = False, max_age: int = 60, update: bool = False ) -> dict[int, str]: subnet = self.resolve_subnet(subnet) addresses = self.query_map("Address", [subnet], extract_value=extract_value, max_age=max_age, update=update) sorted_uids = list(sorted(list(addresses.keys()))) @@ -2046,15 +2042,15 @@ def get_balances( """ Retrieves a mapping of account balances within the network. """ - key2address = c.key2address() - key_addresses = key_addresses or list(key2address.values()) - key_addresses = [key2address.get(a, a) for a in key_addresses] + key_addresses = key_addresses or list(c.key2address().values()) with self.get_conn(init=True) as substrate: - balances = substrate.query_multi( [substrate.create_storage_key(pallet='System', storage_function='Account', params=[ka]) for ka in key_addresses if not ka.startswith('0x')]) + storage_keys = [substrate.create_storage_key(pallet='System', storage_function='Account', params=[ka]) for ka in key_addresses if not ka.startswith('0x')] + balances = substrate.query_multi(storage_keys, block_hash=block_hash) return balances - def my_balance(self, batch_size=128, timeout=60, max_age=6000, update=False): - path = f'{self.network}/balances' + def my_balance(self, batch_size=128, timeout=120, max_age=6000, update=False, num_connections=10): + path = f'{self.network}/my_balance' + self.set_connections(num_connections=num_connections) balances = self.get(path, None, update=update, max_age=max_age) if balances == None: @@ -2088,6 +2084,9 @@ def my_balance(self, batch_size=128, timeout=60, max_age=6000, update=False): balances = {k: v for k, v in balances.items() if v > 0} balances = dict(sorted(balances.items(), key=lambda x: x[1], reverse=True)) return self.format_amount(balances, fmt='j') + + def balances(self, *args, **kwargs): + return self.my_balance(*args, **kwargs) def names( self, subnet: int = 0, extract_value: bool = False, max_age=60, update=False ) -> dict[int, str]: @@ -2100,7 +2099,6 @@ def names( names = dict(sorted(names.items(), key=lambda x: x[0])) return names - def proposal(self, proposal_id: int = 0): """ Queries the network for a specific proposal. 
@@ -2360,7 +2358,6 @@ def params(self, subnet = None, block_hash: str | None = None, max_age=tempo, u ("MaxAllowedValidators", params), ("ModuleBurnConfig", params), ("SubnetMetadata", params), - ("TrustRatio", params), ], "GovernanceModule": [ ("SubnetGovernanceConfig", params), @@ -2372,6 +2369,7 @@ def params(self, subnet = None, block_hash: str | None = None, max_age=tempo, u }, block_hash, ) + subnet_maps: SubnetParamsMaps = { "emission": bulk_query["SubnetEmission"], @@ -2400,7 +2398,8 @@ def params(self, subnet = None, block_hash: str | None = None, max_age=tempo, u default_subnet_map = { 'min_validator_stake': self.to_nanos(50_000), 'max_allowed_validators': 50, - 'maximum_set_weight_calls_per_epoch': 30 + 'maximum_set_weight_calls_per_epoch': 30, + 'trust_ratio': 50, } subnet_map_keys = list(subnet_maps.keys()) netuids = list(subnet_maps["name"].keys()) @@ -2516,28 +2515,45 @@ def my_modules(self, subnet="all", df = False, update=False): if subnet == "all": - return {sn: self.my_modules(subnet=sn, keys=ks, df=df) for sn, ks in self.keys_map().items()} - subnet = self.resolve_subnet(subnet) - path = f'my_modules/{self.network}/{subnet}' - modules = self.get(path, None, max_age=max_age, update=update) - namespace = c.namespace() - if modules == None: - address2key = c.address2key() - keys = keys or self.keys(subnet) - my_keys = [] - for k in keys: - if k in address2key: - my_keys += [k] - modules = self.get_modules(my_keys, subnet=subnet) - for i,m in enumerate(modules): - serving = m['name'] in namespace - m['serving'] = serving - m['name'] = address2key[m['key']] - modules[i] = m - features += ['serving'] - modules = [{f:m[f] for f in features} for m in modules] - if df: - modules = c.df(modules) + modules = [] + for sn, ks in self.keys_map().items(): + sn_modules = self.my_modules(subnet=sn, keys=ks, df=False) + for m in sn_modules: + m['subnet'] = sn + modules += [m] + if df: + modules = c.df(modules) + # modules = modules.groupb('key').agg(list).reset_index() + # modules['stake'] = modules['stake'].apply(sum) + else: + subnet = self.resolve_subnet(subnet) + path = f'my_modules/{self.network}/{subnet}' + modules = self.get(path, None, max_age=max_age, update=update) + namespace = c.namespace() + if modules == None: + address2key = c.address2key() + keys = keys or self.keys(subnet) + my_keys = [] + for k in keys: + if k in address2key: + my_keys += [k] + modules = self.get_modules(my_keys, subnet=subnet) + for i,m in enumerate(modules): + if not 'name' in m: + continue + serving = m['name'] in namespace + m['serving'] = serving + m['name'] = address2key[m['key']] + modules[i] = m + features += ['serving'] + modules = [{f:m.get(f, None) for f in features} for m in modules] + + if df: + modules = c.df(modules) + # group on key + modules = modules.groupb('key').agg(list).reset_index() + modules['stake'] = modules['stake'].apply(sum) + return modules def my_valis(self, subnet=0): @@ -2546,53 +2562,19 @@ def my_valis(self, subnet=0): def my_keys(self, subnet=0): return [m['key'] for m in self.my_modules(subnet)] + def valis(self, subnet=0, max_age=60, update=False, df=1, search=None, features=['Name', 'Keys', 'StakeFrom'], **kwargs): + valis = self.modules(subnet=subnet , max_age=max_age, features=features,update=update, **kwargs) + if search != None: + valis = [v for v in valis if search in v['name'] or search ] + if df: + valis = c.df(valis) + valis.set_index('uid', inplace=True) + del valis['stake_from'] + valis.sort_values('stake', ascending=False, inplace=True) - def 
all_modules(self, - max_age = tempo, - update=False, - module = "SubspaceModule", - features = ['Name', 'Address', 'Keys', - 'Weights', 'Incentive', - 'Dividends', 'Emission', - 'DelegationFee', 'LastUpdate', - 'Metadata', 'StakeFrom' ], - default_module = { - 'Weights': [], - 'DelegationFee': 30, - 'LastUpdate': -1, - }, - **kwargs): - - path = f'{self.network}/modules/all' - modules = self.get(path, None, max_age=max_age, update=update) - if modules == None: - results = self.query_batch_map({module:[(f, []) for f in features]},self.block_hash()) - results = self.process_results(results) - netuids = list(results['Keys'].keys()) - modules = {} - for _netuid in netuids: - modules[_netuid] = [] - for uid in results['Keys'][_netuid].keys(): - module = {'uid': uid} - for f in features: - module[f] = results[f].get(_netuid, {}) - if f in ['StakeFrom'] : - module_key = results['Keys'][_netuid][uid] - module[f] = results[f].get(module_key, {}) - else: - if isinstance(module[f], dict): - module[f] = module[f].get(uid, default_module.get(f, None)) - elif isinstance(module[f], list): - module[f] = module[f][uid] - module = {self.storage2name(k):v for k,v in module.items()} - modules[_netuid].append(module) - self.put(path, modules) - modules = {int(k):v for k,v in modules.items()} - return modules - + return valis - def validators(self, subnet=0): - return self.modules(subnet=subnet ) + @@ -2635,17 +2617,16 @@ def modules(self, update=False, timeout=30, module = "SubspaceModule", - features = ['Name', 'Address', 'Keys'], + features = ['Name', 'Address', 'Keys', 'Emission'], extra_features = [ 'Weights','Incentive','Dividends', 'Emission', 'DelegationFee', 'LastUpdate'], lite = True, vector_fetures = ['Incentive', 'Dividends', 'Emission'], num_connections = 4, + search=None, + df = False, default_module = {'Weights': [], 'Incentive': 0, 'Emissions': 0, 'Dividends': 0, 'DelegationFee': 30, 'LastUpdate': 0, }, **kwargs): - if subnet == 'all': - return self.all_modules(max_age=max_age, update=update, module=module, features=features, default_module=default_module, **kwargs) - subnet = self.resolve_subnet(subnet) if not lite: features += extra_features @@ -2658,10 +2639,11 @@ def modules(self, self.set_network(num_connections=num_connections) future2feature = {} params = [subnet] if subnet != None else [] - for feature in features: - params = [subnet] if subnet != None else [] - if feature in ['StakeFrom'] and lite == False: + for feature in features: + if feature in ['StakeFrom']: params = [] + else: + params = [subnet] if subnet != None else [] fn_obj = self.query if feature in vector_fetures else self.query_map f = c.submit(fn_obj, kwargs=dict(name=feature, params=params), timeout=timeout) future2feature[f] = feature @@ -2680,8 +2662,10 @@ def modules(self, if isinstance(results[f], dict): if f in ['Keys']: module[f[:-1]] = module_key + elif f in ['StakeFrom'] : module[f] = results[f].get(module_key, {}) + module['Stake'] = sum([v for k,v in module[f].items()]) / 10**9 else: module[f] = results[f].get(uid, default_module.get(f, None)) elif isinstance(results[f], list): @@ -2691,6 +2675,17 @@ def modules(self, self.put(path, modules) # modules = sorted(modules) modules = sorted(modules, key=lambda x: x["uid"]) + if 'emission' in modules[0]: + modules = sorted(modules, key=lambda x: x["emission"], reverse=True) + for i,m in enumerate(modules): + m['rank'] = i + m['emission'] = self.format_amount(m['emission'], fmt='j') + + if search: + modules = [m for m in modules if search in m['name']] + if df: + 
modules = c.df(modules) + return modules def root_modules(self, subnet=0, **kwargs): @@ -2742,13 +2737,7 @@ def get_modules(self, keys, subnet=0, max_age=60): futures = [ c.submit(self.get_module, kwargs=dict(module=k, subnet=subnet, max_age=max_age)) for k in keys] return c.wait(futures, timeout=30) - def get_module(self, - module, - subnet=0, - fmt='j', - mode = 'https', - block = None, - **kwargs ) -> 'ModuleInfo': + def get_module(self, module, subnet=0, fmt='j', mode = 'https', block = None, **kwargs ) -> 'ModuleInfo': url = self.get_url( mode=mode) subnet = self.resolve_subnet(subnet) module = self.resolve_key_address(module) @@ -2761,7 +2750,7 @@ def get_module(self, module['dividends'] = module['dividends'] / (U16_MAX) module['incentive'] = module['incentive'] / (U16_MAX) module['stake_from'] = {k:self.format_amount(v, fmt=fmt) for k,v in module['stake_from']} - module['stake'] = sum([v for k,v in module['stake_from'].items() ]) + module['stake'] = sum([v / 10**9 for k,v in module['stake_from'].items() ]) module['emission'] = self.format_amount(module['emission'], fmt=fmt) module['key'] = module.pop('controller', None) module['metadata'] = module.pop('metadata', {}) @@ -2793,6 +2782,18 @@ def transform_stake_dmap(self, stake_storage: dict[tuple[Ss58Address, Ss58Addres [transformed[k1].append((k2, v)) for (k1, k2), v in stake_storage.items()] return dict(transformed) + + + def miners(self, subnet=0, max_age=60, update=False): + return self.modules(subnet=subnet, max_age=max_age, update=update) + + def stats(self, subnet=0, max_age=60, update=False): + modules = c.df(self.modules(subnet=subnet, max_age=max_age, update=update)) + + return modules + + + diff --git a/commune/server.py b/commune/server.py index 9acf947e4..d66e6d1a4 100644 --- a/commune/server.py +++ b/commune/server.py @@ -10,123 +10,215 @@ import json import asyncio +class Middleware(BaseHTTPMiddleware): + def __init__(self, app, max_bytes: int): + super().__init__(app) + self.max_bytes = max_bytes + async def dispatch(self, request: Request, call_next): + content_length = request.headers.get('content-length') + if content_length: + if int(content_length) > self.max_bytes: + return JSONResponse(status_code=413, content={"error": "Request too large"}) + body = await request.body() + if len(body) > self.max_bytes: + return JSONResponse(status_code=413, content={"error": "Request too large"}) + response = await call_next(request) + return response + class Server(c.Module): tag_seperator:str='::' + user_data_lifetime = 3600 pm2_dir = os.path.expanduser('~/.pm2') - functions_attributes =['helper_functions', 'whitelist','endpoints','functions', 'fns', 'server_functions', 'public'] - helper_functions = ['info', 'metadata', 'schema', 'name', 'functions','key_address', 'crypto_type','fns', 'forward', 'rate_limit'] - max_bytes:int = 10 * 1024 * 1024 # max bytes within the request (bytes) - allow_origins = ["*"] # allowed origins - allow_credentials =True # allow credentials - allow_methods = ["*"] # allowed methods - allow_headers = ["*"] # allowed headers period : int = 3600 # the period for max_request_staleness : int = 4 # (in seconds) the time it takes for the request to be too old max_network_staleness: int = 60 # (in seconds) the time it takes for. 
the network to refresh def __init__( self, + ### CORE PARAMETERS module: Union[c.Module, object] = None, - functions:Optional[List[str]] = None, # list of endpoints key:str = None, # key for the server (str) name: str = None, # the name of the server + functions:Optional[List[Union[str, callable]]] = None, # list of endpoints port: Optional[int] = None, # the port the server is running on network:str = 'subspace', # the network used for incentives fn2cost : Dict[str, float] = None, # the cost of the function - serializer = 'serializer', + free : bool = False, kwargs : dict = None, # the kwargs for the module + crypto_type = 'sr25519', # the crypto type of the key + users_path: Optional[str] = None, # the path to the user data + serializer: str = 'serializer', # the serializer used for the data ) -> 'Server': - - functions = functions or [] module = module or 'module' + kwargs = kwargs or {} if self.tag_seperator in name: # module::fam -> module=module, name=module::fam key=module::fam (default) module, tag = name.split(self.tag_seperator) + module = c.module(module)(**kwargs) if isinstance(module, str): name = name or module - module_class = c.module(module) - kwargs = kwargs or {} - module = module_class(**kwargs) - module.name = name - module.key = c.get_key(key or module.name, create_if_not_exists=True) - module.key_address = module.key.ss58_address - module.crypto_type = module.key.crypto_type - if not hasattr(module, 'fn2cost'): - module.fn2cost = fn2cost or {} - functions = sorted(list(set(functions + self.helper_functions))) - schema = {} + module = c.module(module)(**kwargs) + # NOTE: ONLY ENABLE FREEMODE IF YOU ARE ON A CLOSED NETWORK, + self.serializer = c.module(serializer)() + self.module = module + self.set_name(name) + self.set_key(key=key, crypto_type=crypto_type) + self.set_port(port) + self.set_network(network) + self.set_functions(functions=functions, fn2cost=fn2cost, free=free) + self.set_user_path(users_path) + self.start_server() + + def set_user_path(self, users_path): + self.users_path = users_path or self.resolve_path(f'users/{self.module.name}') + + def set_name(self, name): + self.module.name = name + return {'success':True, 'message':f'Set name to {name}'} + def set_functions(self, + functions:Optional[List[str]] , + fn2cost=None, + helper_functions = ['info', 'metadata', 'schema', 'free', 'name', 'functions','key_address', 'crypto_type','fns', 'forward', 'rate_limit'], + functions_attributes =['helper_functions', 'whitelist', "whitelist_functions", 'endpoints', 'functions', 'fns', "exposed_functions",'server_functions', 'public_functions'], + free = False + ): + + + self.free = free + if self.free: + c.print('FREE MODE ENABLED', color='red') + functions = functions or [] + for i, fn in enumerate(functions): + if callable(fn): + print('Adding function', fn) + setattr(self, fn.__name__, fn) + functions[i] = fn.__name__ + + functions = sorted(list(set(functions + helper_functions))) + module = self.module functions = functions or [] - for k in self.functions_attributes: + for k in functions_attributes: if hasattr(module, k): - fn_obj = getattr(module, k) - if isinstance(fn_obj, list): - functions += fn_obj + function_addributes = getattr(module, k) + if isinstance(function_addributes, list): + functions += function_addributes # get function decorators form c.endpoint() + for f in dir(module): try: if hasattr(getattr(module, f), '__metadata__'): functions.append(f) except Exception as e: c.print(f'Error in get_endpoints: {e} for {f}') + module.functions = 
sorted(list(set(functions))) + + ## get the schema for the functions + schema = {} for fn in functions : if hasattr(module, fn): fn_obj = getattr(module, fn ) if callable(fn_obj): - schema[fn] = c.fn_schema(fn_obj)['input'] + schema[fn] = c.schema(fn_obj) else: schema[fn] = {'type': str(type(fn_obj)).split("'")[1]} + module.schema = dict(sorted(schema.items())) + module.address = c.ip() + ':' + str(module.port) + + + + if not hasattr(module, 'fn2cost'): + module.fn2cost = fn2cost or {} + + + ### get the info for the module + module.info = { + "functions": functions, + "schema": schema, + "name": module.name, + "address": module.address, + "key": module.key.ss58_address, + "crypto_type": module.key.crypto_type, + "fn2cost": module.fn2cost, + } + + + + def set_user_path(self, users_path): + self.users_path = users_path or self.resolve_path(f'users/{self.module.name}') + + def set_key(self, key, crypto_type): + module = self.module + module.key = c.get_key(key or module.name, create_if_not_exists=True, crypto_type=crypto_type) + module.key_address = module.key.key_address + module.crypto_type = module.key.crypto_type + return {'success':True, 'message':f'Set key to {module.key.ss58_address}'} + + + def start_server(self, + max_bytes = 10 * 1024 * 1024 , # max bytes within the request (bytes) + allow_origins = ["*"], # allowed origins + allow_credentials =True, # allow credentials + allow_methods = ["*"], # allowed methods + allow_headers = ["*"] , # allowed headers + ): + module = self.module + c.thread(self.sync_loop) + self.loop = asyncio.get_event_loop() + app = FastAPI() + app.add_middleware(Middleware, max_bytes=max_bytes) + app.add_middleware(CORSMiddleware, + allow_origins=allow_origins, + allow_credentials=allow_credentials, + allow_methods=allow_methods, + allow_headers=allow_headers) + def api_forward(fn:str, request: Request): + return self.forward(fn, request) + app.post("/{fn}")(api_forward) + c.print(f'Served(name={module.name}, address={module.address}, key={module.key.key_address})', color='purple') + c.print(c.register_server(name=module.name, address=module.address, key=module.key.ss58_address)) + self.module = module + uvicorn.run(app, host='0.0.0.0', port=module.port, loop='asyncio') + + def set_port(self, port:Optional[int]=None): + module = self.module + name = module.name + if port in [None, 'None']: namespace = c.namespace() if name in namespace: + c.kill(name) try: port = int(namespace.get(module.name).split(':')[-1]) except: port = c.free_port() else: port = c.free_port() - if c.port_used(port): - port = c.free_port() - module.ip = c.ip() - module.port = port or c.free_port() - module.address = f"{module.ip}:{module.port}" - module.functions = functions - module.schema = dict(sorted(schema.items())) - module.info = self.info - self.network = network - self.network_path = self.resolve_path(f'networks/{self.network}/state.json') - self.users_path = self.resolve_path(f'users/{name}') - self.serializer = c.module(serializer)() - self.sync(update=False) - c.thread(self.sync_loop) - self.loop = asyncio.get_event_loop() - app = FastAPI() - app.add_middleware(self.Middleware, max_bytes=self.max_bytes) - app.add_middleware(CORSMiddleware, - allow_origins=self.allow_origins, - allow_credentials=self.allow_credentials, - allow_methods=self.allow_methods, - allow_headers=self.allow_headers) - def api_forward(fn:str, request: Request): - return self.forward(fn, request) - app.post("/{fn}")(api_forward) - c.print(f'Served(name={module.name}, address={module.address}, 
key={module.key}) 🚀 ', color='purple') - c.register_server(name=module.name,address=module.address, key=module.key.ss58_address) - self.module = module - uvicorn.run(app, host='0.0.0.0', port=module.port, loop='asyncio') + while c.port_used(port): + c.kill_port(port) + c.sleep(1) + print(f'Waiting for port {port} to be free') + + module.port = port + module.address = c.ip() + ':' + str(module.port) + self.module = module + return {'success':True, 'message':f'Set port to {port}'} + + def is_admin(self, address): + return c.is_admin(address) def rate_limit(self, address:str, - fn: str= 'info', - multipliers : Dict[str, float] = {'stake': 1, 'stake_to': 1,'stake_from': 1}, - rates : Dict[str, int]= {'max': 10, 'local': 10000, 'stake2rate': 1000, 'admin': 10000}, # the maximum rate - ) -> float: + fn: str= 'info', + multipliers : Dict[str, float] = {'stake': 1, 'stake_to': 1,'stake_from': 1}, + rates : Dict[str, int]= {'max': 10, 'local': 10000, 'stake2rate': 1000, 'admin': 10000}, # the maximum rate + ) -> float: # stake rate limit module = self.module if c.is_admin(address) or address == module.key.ss58_address: return rates['admin'] - if address in self.state['address2key']: + if address in self.address2key: return rates['local'] stake_score = self.state['stake'].get(address, 0) + multipliers['stake'] stake_to_score = (sum(self.state['stake_to'].get(address, {}).values())) * multipliers['stake_to'] @@ -134,6 +226,27 @@ def rate_limit(self, stake = stake_score + stake_to_score + stake_from_score rates['stake2rate'] = rates['stake2rate'] * module.fn2cost.get(fn, 1) return min((stake / rates['stake2rate']), rates['max']) + + + def serialize(self, data): + return self.serializer.serialize(data) + + def deserialize(self, data): + return self.serializer.deserialize(data) + + def verify_request(self, fn:str, data:dict, headers:dict ): + if self.free: + assert fn in self.module.functions , f"Function {fn} not in endpoints={self.module.functions}" + return True + request_staleness = c.time() - float(headers['time']) + assert request_staleness < self.max_request_staleness, f"Request is too old ({request_staleness}s > {self.max_request_staleness}s (MAX)" + auth={'data': c.hash(data), 'time': headers['time']} + signature = headers['signature'] + rate_limit = self.rate_limit(fn=fn, address=headers['key']) + count = self.user_count(headers['key']) + assert count <= rate_limit, f'rate limit exceeded {count} > {rate_limit}' + assert c.verify(auth=auth,signature=signature, address=headers['key']), 'Invalid signature' + return True def forward(self, fn:str, request: Request, catch_exception:bool=True) -> dict: if catch_exception: @@ -142,69 +255,48 @@ def forward(self, fn:str, request: Request, catch_exception:bool=True) -> dict: except Exception as e: return c.detailed_error(e) module = self.module - - headers = dict(request.headers.items()) - address = headers.get('key', headers.get('address', None)) - assert address, 'No key or address in headers' - request_staleness = c.time() - float(headers['time']) - assert request_staleness < self.max_request_staleness, f"Request is too old ({request_staleness}s > {self.max_request_staleness}s (MAX)" data = self.loop.run_until_complete(request.json()) - data = self.serializer.deserialize(data) - request = {'data': data, 'headers': headers} - auth={'data': c.hash(data), 'time': headers['time']} - signature = headers.get('signature', None) - assert c.verify(auth=auth,signature=signature, address=address), 'Invalid signature' - server_signature = 
module.key.sign(headers)
-        kwargs = dict(data.get('kwargs', {}))
+        # data = self.serializer.deserialize(data)
+        kwargs = dict(data.get('kwargs', data.get('params', {})))
         args = list(data.get('args', []))
         data = {'args': args, 'kwargs': kwargs}
-        is_admin = bool(c.is_admin(address) or address == self.module.key.ss58_address)
-        if not is_admin:
-            assert not bool(fn.startswith('__') or fn.startswith('_')), f'Function {fn} is private'
-            assert fn in module.functions , f"Function {fn} not in endpoints={module.functions}"
-            count = self.user_count(address)
-            rate_limit = self.rate_limit(fn=fn, address=address)
-            assert count <= rate_limit, f'rate limit exceeded {count} > {rate_limit}'
-        fn_obj = getattr(self.module, fn)
-        if is_admin:
-            RANK = 'ADMIN'
-        elif address in self.state['address2key']:
-            RANK = 'LOCAL'
-        else:
-            RANK = 'NA'
-        start_time = float(headers['time'])
+        headers = dict(request.headers.items())
+        headers['key'] = headers.get('key', headers.get('address', None))
+        is_admin = bool(c.is_admin(headers['key']))
+        is_owner = bool(headers['key'] == module.key.ss58_address)
+        self.verify_request(fn=fn, data=data, headers=headers)
+        if hasattr(module, fn):
+            fn_obj = getattr(module, fn)
+        elif (is_admin or is_owner) and hasattr(self, fn):
+            # only the admin or the server owner can call server-level functions
+            fn_obj = getattr(self, fn)
+        start_time = float(headers.get('time', c.time()))
         result = fn_obj(*data['args'], **data['kwargs']) if callable(fn_obj) else fn_obj
         end_time = c.time()
         latency = c.round(end_time - start_time, 3)
+
         if c.is_generator(result):
-            output = []
             def generator_wrapper(generator):
                 try:
                     for item in generator:
-                        output_item = self.serializer.serialize(item)
-                        yield output_item
+                        yield item
                 except Exception as e:
-                    c.print(e)
                     yield str(c.detailed_error(e))
             result = EventSourceResponse(generator_wrapper(result))
         else:
             output = self.serializer.serialize(result)
-
-        user_data = {
-            'module': module.name,
-            'fn': fn,
-            'input': data, # the data of the request
-            'output': output, # the response
-            'latency': latency, # the latency
-            'time': start_time, # the time of the request
-            'user_key': address, # the key of the user
-            'server_key': module.key.ss58_address, # the key of the server
-            'user_signature': signature, # the signature of the user
-            'server_signature': server_signature, # the signature of the server
-            'cost': module.fn2cost.get(fn, 1), # the cost of the function
-        }
-        user_path = self.user_path(user_data["user_key"]) + f'/{user_data["fn"]}/{c.time()}.json' # get the user info path
-        c.put(user_path, user_data)
+        if not self.free:
+            user_data = {
+                'fn': fn,
+                'input': data, # the data of the request
+                'output': output, # the response
+                'time': start_time, # the time of the request
+                'latency': latency, # the latency of the request
+                'key': headers['key'], # the key of the user
+                'cost': module.fn2cost.get(fn, 1), # the cost of the function
+            }
+            user_path = self.user_path(f'{user_data["key"]}/{user_data["fn"]}/{c.time()}.json')
+            c.put(user_path, user_data)
         return result

     def sync_loop(self, sync_loop_initial_sleep=4):
@@ -217,10 +309,19 @@ def sync_loop(self, sync_loop_initial_sleep=4):
                 c.print('Error in sync_loop -->', r, color='red')
             c.sleep(self.max_network_staleness)

-    def sync(self, update=True ):
-        t0 = c.time()
+    def set_network(self, network):
+        self.network = network
+        self.network_path = self.resolve_path(f'networks/{self.network}/state.json')
+        c.thread(self.sync_loop)
+        # self.sync()
+        return {'success':True, 'message':f'Set network to {network}', 'network':network, 
'network_path':self.network_path} + + def sync(self, update=True , state_keys = ['stake_from', 'stake_to']): + self.network_path = self.resolve_path(f'networks/{self.network}/state.json') + print('SYNCING NETWORK') + if hasattr(self, 'state'): - latency = c.time() - self.state['time'] + latency = c.time() - self.state.get('time', 0) if latency < self.max_network_staleness: return {'msg': 'state is fresh'} max_age = self.max_network_staleness @@ -228,11 +329,11 @@ def sync(self, update=True ): state = self.get(network_path, {}, max_age=max_age, updpate=update) network = self.network state = {} - state['address2key'] = c.address2key() - state['key2address'] = {v:k for k,v in state['address2key'].items()} + self.address2key = c.address2key() state['stake'] = {} state['stake_to'] = {} state['stake_from'] = {} + if update: try : c.namespace(max_age=max_age) @@ -242,9 +343,6 @@ def sync(self, update=True ): state['stake'] = {k: sum(v.values()) for k,v in state['stake_from'].items()} except Exception as e: c.print(f'Error {e} while syncing network--> {network}') - state['time'] = c.time() - state['latency'] = state['time'] - t0 - state_keys = ['stake_from', 'stake_to', 'address2key', 'stake', 'key2address', 'time', 'latency'] is_valid_state = lambda x: all([k in x for k in state_keys]) assert is_valid_state(state), f'Format for network state is {[k for k in state_keys if k not in state]}' self.put(network_path, state) @@ -268,26 +366,17 @@ def wait_for_server(cls, if name in namespace: try: result = c.call(namespace[name]+'/info') + print(result) if 'key' in result: c.print(f'{name} is running', color='green') - return result + return result except Exception as e: c.print(f'Error getting info for {name} --> {e}', color='red') c.sleep(sleep_interval) time_waiting += sleep_interval raise TimeoutError(f'Waited for {timeout} seconds for {name} to start') - def info(self, crypto_type: str = 'sr25519', **kwargs) -> dict: - info = {} - module = self.module - module.address = c.ip() + ':' + str(module.port) - - info['schema'] = module.schema - info['name'] = module.name - info['address'] = module.address - info['key'] = module.key.ss58_address - info['crypto_type'] = module.key.crypto_type - return info + def add_endpoint(self, name, fn): setattr(self, name, fn) @@ -312,7 +401,7 @@ def endpoint(cls, def decorator_fn(fn): metadata = { - **c.fn_schema(fn), + **c.schema(fn), 'cost': cost, 'rate_limit': rate_limit, 'user2rate': user2rate, @@ -329,20 +418,7 @@ def decorator_fn(fn): serverfn = endpoint - class Middleware(BaseHTTPMiddleware): - def __init__(self, app, max_bytes: int): - super().__init__(app) - self.max_bytes = max_bytes - async def dispatch(self, request: Request, call_next): - content_length = request.headers.get('content-length') - if content_length: - if int(content_length) > self.max_bytes: - return JSONResponse(status_code=413, content={"error": "Request too large"}) - body = await request.body() - if len(body) > self.max_bytes: - return JSONResponse(status_code=413, content={"error": "Request too large"}) - response = await call_next(request) - return response + @classmethod def kill(cls, name:str, verbose:bool = True, **kwargs): @@ -415,7 +491,7 @@ def logs(cls, try: text += c.get_text(path, tail=tail) except Exception as e: - c.c.print('ERROR GETTING LOGS -->' , e) + c.print('ERROR GETTING LOGS -->' , e) continue return text elif mode == 'cmd': @@ -478,7 +554,7 @@ def launch(cls, @classmethod def restart(cls, name:str): assert name in cls.processes() - c.c.print(f'Restarting {name}', 
color='cyan') + c.print(f'Restarting {name}', color='cyan') c.cmd(f"pm2 restart {name}", verbose=False) cls.rm_logs(name) return {'success':True, 'message':f'Restarted {name}'} @@ -513,27 +589,22 @@ def serve(cls, remote:bool = True, # runs the server remotely (pm2, ray) functions = None, # list of functions to serve, if none, it will be the endpoints of the module key = None, # the key for the server + free = False, + cwd = None, **extra_kwargs ): module = module or 'module' name = name or module kwargs = {**(kwargs or {}), **extra_kwargs} - c.print(f'Serving(module={module} params={kwargs} name={name} function={functions})') - if not isinstance(module, str): remote = False if remote: rkwargs = {k : v for k, v in c.locals2kwargs(locals()).items() if k not in ['extra_kwargs', 'response', 'namespace']} rkwargs['remote'] = False - c.remote_fn('serve', name=name, kwargs=rkwargs) + cls.launch('serve', name=name, kwargs=rkwargs, cwd=cwd) return cls.wait_for_server(name) - return Server(module=module, - name=name, - port=port, - key=key, - functions = functions, - kwargs=kwargs) + return Server(module=module, name=name, functions = functions, kwargs=kwargs, port=port, key=key, free = free) @classmethod @@ -544,14 +615,9 @@ def fleet(cls, module, n:int = 1, **kwargs): future = c.submit(c.serve, dict(module=module, name = module + '::' + str(_), **kwargs)) futures.append(future) for future in c.as_completed(futures): - c.c.print(future.result()) + c.print(future.result()) return {'success':True, 'message':f'Served {n} servers', 'namespace': c.namespace()} - def check_all_users(self): - for user in self.users(): - c.print('Checking', user) - self.chekcer_user_data() - def extract_time(self, x): try: x = float(x.split('/')[-1].split('.')[0]) @@ -576,16 +642,23 @@ def user_paths(self, address ): user_paths = c.glob(self.user_path(address)) return sorted(user_paths, key=self.extract_time) - def user_data(self, address): - for i, user_path in enumerate(self.user_paths(address)): - yield c.get(user_path) + def user_data(self, address, stream=False): + user_paths = self.user_paths(address) + if stream: + def stream_fn(): + for user_path in user_paths: + yield c.get(user_path) + return stream_fn() - def user_path(self, address): - return self.users_path + '/' + address + else: + return [c.get(user_path) for user_path in user_paths] + + def user_path(self, key_address): + return self.users_path + '/' + key_address - def user_count(self, address): - self.check_user_data(address) - return len(self.user_paths(address)) + def user_count(self, user): + self.check_user_data(user) + return len(self.user_paths(user)) def user_path2time(self, address): user_paths = self.user_paths(address) @@ -601,9 +674,9 @@ def user_path2latency(self, address): def check_user_data(self, address): path2latency = self.user_path2latency(address) for path, latency in path2latency.items(): - if latency > self.period: + if latency > self.user_data_lifetime: c.print(f'Removing stale path {path} ({latency}/{self.period})') - os.remove(path) - + if os.path.exists(path): + os.remove(path) Server.run(__name__) diff --git a/commune/utils/misc.py b/commune/utils/misc.py index 0f8517f60..0d64b7418 100644 --- a/commune/utils/misc.py +++ b/commune/utils/misc.py @@ -44,8 +44,10 @@ def get_files( path ='./', paths = [p for p in paths if search in p] return paths -def file2text(path = './', - avoid_terms = ['__pycache__', +def abspath(path:str): + return os.path.abspath(os.path.expanduser(path)) + +def file2text(path = './', avoid_terms = 
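
Taken together, `verify_request` compares `user_count(address)` (how many call records are still on disk for that caller) against `rate_limit(address, fn)`, and for callers that are neither admins nor local keys `rate_limit` reduces to `min(stake / (stake2rate * fn2cost[fn]), max)`. A worked example with made-up numbers:

```python
# Worked example of the stake-based limit in Server.rate_limit (numbers are illustrative).
rates = {'max': 10, 'local': 10000, 'stake2rate': 1000, 'admin': 10000}
fn2cost = {'forward': 2}          # hypothetical: 'forward' is twice as expensive as default

def rate_limit(stake: float, fn: str) -> float:
    stake2rate = rates['stake2rate'] * fn2cost.get(fn, 1)
    return min(stake / stake2rate, rates['max'])

print(rate_limit(5_000, 'info'))     # 5.0  -> at most 5 unexpired call records allowed
print(rate_limit(5_000, 'forward'))  # 2.5  -> costlier function, tighter limit
print(rate_limit(50_000, 'info'))    # 10.0 -> capped at rates['max']
```
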
['__pycache__', '.git', '.ipynb_checkpoints', 'package.lock', @@ -56,8 +58,11 @@ def file2text(path = './', 'cache/', 'target/debug', 'node_modules'], + avoid_paths = ['~', '/tmp', '/var', '/proc', '/sys', '/dev'], relative=True, **kwargs): + path = os.path.abspath(os.path.expanduser(path)) + assert all([not os.path.abspath(k) in path for k in avoid_paths]), f'path {path} is in avoid_paths' file2text = {} for file in get_files(path, recursive=True, avoid_terms=avoid_terms , **kwargs): if os.path.isdir(file): @@ -72,15 +77,10 @@ def file2text(path = './', return {k[len(path)+1:]:v for k,v in file2text.items()} return file2text - -def abspath( path:str): - return os.path.abspath(os.path.expanduser(path)) - def random_int(start_value=100, end_value=None): if end_value == None: end_value = start_value start_value, end_value = 0 , start_value - assert start_value != None, 'start_value must be provided' assert end_value != None, 'end_value must be provided' return random.randint(start_value, end_value) @@ -156,6 +156,7 @@ def dict2munch( x:dict, recursive:bool=True)-> 'Munch': x = Munch(x) return x + def munch2dict( x:'Munch', recursive:bool=True)-> dict: from munch import Munch ''' diff --git a/commune/utils/os.py b/commune/utils/os.py index 7225f3191..ff92e0869 100644 --- a/commune/utils/os.py +++ b/commune/utils/os.py @@ -1205,6 +1205,10 @@ def get_port(port:int = None)->int: def port_range(): return get_port_range() +def ports() -> List[int]: + + return list(range(*get_port_range())) + def resolve_port_range(port_range: list = None) -> list: return get_port_range(port_range) diff --git a/docs/_intro.md b/docs/0__intro.md similarity index 100% rename from docs/_intro.md rename to docs/0__intro.md diff --git a/docs/_install.md b/docs/1__install.md similarity index 100% rename from docs/_install.md rename to docs/1__install.md diff --git a/docs/docs.py b/docs/docs.py index 4fd9335e0..4a227ea46 100644 --- a/docs/docs.py +++ b/docs/docs.py @@ -13,4 +13,6 @@ def ask(self, *question, CONTEXT: {context} """ - return c.ask(prompt, model=model, **kwargs) \ No newline at end of file + return c.ask(prompt, model=model, **kwargs) + + \ No newline at end of file diff --git a/docs/server.md b/docs/server.md index 51da63f02..c7fe7b0c6 100644 --- a/docs/server.md +++ b/docs/server.md @@ -61,7 +61,7 @@ c call demo/info You can restart or kill a served module using the `restart()` and `kill()` methods: ```python -c.restart('demo') # Restart the module which will run back on the same port +c.restart('demo') # Restart the module which will run bxack on the same port ``` diff --git a/modules/agent/agent.py b/modules/agent/agent.py new file mode 100644 index 000000000..1cd97532c --- /dev/null +++ b/modules/agent/agent.py @@ -0,0 +1,13 @@ +import commune as c +import json +import os + +class AgentCondense(c.Module): + description = "This module is used to find files and modules in the current directory" + anchor="OUTPUT" + + def __init__(self, prompt=None, model=None, anchor=None): + super().__init__(prompt, model, anchor) + self.prompt = prompt + self.model = model + self.anchor = anchor \ No newline at end of file diff --git a/modules/anthropic/anthropic.py b/modules/anthropic/anthropic.py new file mode 100644 index 000000000..c14711769 --- /dev/null +++ b/modules/anthropic/anthropic.py @@ -0,0 +1,92 @@ +import commune as c + +class Anthropic(c.Module): + """ + Anthropic module for managing Claude API interactions within the commune framework + """ + + def __init__(self, + api_key: str = None, + model: str = 
'claude-3-sonnet', + max_tokens: int = 4096, + temperature: float = 0.7): + """Initialize the Anthropic module + + Args: + api_key (str): Anthropic API key + model (str): Model to use (default: claude-3-sonnet) + max_tokens (int): Maximum tokens for completion + temperature (float): Sampling temperature + """ + self.set_config(locals()) + self.api_key = api_key or c.get_api_key('anthropic') + + def call(self, + prompt: str, + system: str = None, + stream: bool = False, + **kwargs) -> str: + """Call the Anthropic API + + Args: + prompt (str): Input prompt + system (str): System message + stream (bool): Whether to stream response + **kwargs: Additional arguments passed to API + + Returns: + str: Model response + """ + try: + # Import anthropic here to avoid dependency issues + import anthropic + + client = anthropic.Anthropic(api_key=self.api_key) + + message = client.messages.create( + model=self.model, + max_tokens=self.max_tokens, + temperature=self.temperature, + system=system, + messages=[ + {"role": "user", "content": prompt} + ], + stream=stream, + **kwargs + ) + + if stream: + response = "" + for chunk in message: + if chunk.content: + response += chunk.content[0].text + if hasattr(self, 'verbose') and self.verbose: + c.print(chunk.content[0].text, end='') + return response + + return message.content[0].text + + except Exception as e: + c.print(f"Error calling Anthropic API: {str(e)}") + return str(e) + + def test(self): + """Test the Anthropic module""" + prompt = "Write a haiku about AI" + response = self.call(prompt) + c.print(f"Prompt: {prompt}") + c.print(f"Response: {response}") + return response + + def schema(self): + """Return the module's schema""" + return { + "call": { + "input": { + "prompt": "str", + "system": "str", + "stream": "bool" + }, + "output": "str" + } + } \ No newline at end of file diff --git a/commune/modules/app/app.py b/modules/app/app.py similarity index 99% rename from commune/modules/app/app.py rename to modules/app/app.py index b4165bd7c..869fc5b14 100644 --- a/commune/modules/app/app.py +++ b/modules/app/app.py @@ -4,7 +4,6 @@ import streamlit as st from typing import * - class App(c.Module): name_prefix = 'app::' diff --git a/commune/modules/base/base.py b/modules/base/base.py similarity index 100% rename from commune/modules/base/base.py rename to modules/base/base.py diff --git a/modules/builder/builder.py b/modules/builder/builder.py new file mode 100644 index 000000000..245604f68 --- /dev/null +++ b/modules/builder/builder.py @@ -0,0 +1,109 @@ +import commune as c +import time +import os +# import agent as h + +class Builder: + anchor = 'OUTPUT' + + def __init__(self, + model = None, + key = None, + **kwargs): + + self.model = c.module('model.openrouter')(model=model) + self.models = self.model.models() + self.key = c.get_key(key) + + def process_text(self, text): + for ch in text.split(' '): + + if len(ch) > 0 and ch[0] in ['.', '/', '~'] and os.path.exists(ch): + text = text.replace(ch, str(c.file2text(ch))) + return text + + def build(self, + text = 'build a frontend', + *extra_text, + temperature= 0.5, + max_tokens= 1000000, + model= 'anthropic/claude-3.5-sonnet', + path = None, + stream=True + ): + + + prompt = f""" + -- SYSTEM -- + YOU ARE A CODER, YOU ARE MR.ROBOT, YOU ARE TRYING TO BUILD IN A SIMPLE + LEONARDO DA VINCI WAY, YOU ARE A agent, YOU ARE A GENIUS, YOU ARE A STAR, + YOU FINISH ALL OF YOUR REQUESTS WITH UTMOST PRECISION AND SPEED, YOU WILL ALWAYS + MAKE SURE THIS WORKS TO MAKE ANYONE CODE. 
YOU HAVE THE CONTEXT AND INPUTS FOR ASSISTANCE + - Please use to name the repository and + - This is a a full repository construction and please + - INCLUDE A README.md AND a scripts folder with the build.sh + - file to build hte environment in docker and a run.sh file + - to run the environment in docker + - INCLUDE A TESTS folder for pytest + -- OUTPUT FORMAT -- + <{self.anchor}(path/to/file)> # start of file + FILE CONTENT + # end of file + """ + if len(extra_text) > 0: + text = ' '.join(list(map(str, [text] +list(extra_text)))) + + text = self.process_text(text) + output = self.model.generate( prompt + '\n' + text, + stream=stream, + model=model, + max_tokens=max_tokens, + temperature=temperature ) + if path == None: + return output + return self.process_output(output, path=path) + + def process_output(self, response, path=None): + # generator = self.search_output('app') + if path == None: + return response + if not os.path.exists(path): + os.makedirs(path, exist_ok=True) + assert os.path.exists(path), f'Path does not exist: {path}' + path = os.path.abspath(path) + buffer = '-------------' + anchors = [f'<{self.anchor}(', f'')[0] + file_content = content.split(anchors[0] +file_path + ')>')[1].split(anchors[1])[0] + c.put_text(path + '/' + file_path, file_content) + c.print(buffer,'Writing file --> ', file_path, buffer, color=color) + content = '' + color = c.random_color() + return {'path': path, 'msg': 'File written successfully'} + + def prompt_args(self): + # get all of the names of the variables in the prompt + prompt = self.prompt + variables = [] + for line in prompt.split('\n'): + if '{' in line and '}' in line: + variable = line.split('{')[1].split('}')[0] + variables.append(variable) + return list(set(variables)) + + + def utils_path(self): + return os.path.dirname(__file__) + '/utils.py' + + + def utils(self): + return c.find_functions(self.utils_path()) + + \ No newline at end of file diff --git a/commune/modules/chat/app.py b/modules/chat/app.py similarity index 100% rename from commune/modules/chat/app.py rename to modules/chat/app.py diff --git a/modules/chat/chat.py b/modules/chat/chat.py new file mode 100644 index 000000000..bbf882306 --- /dev/null +++ b/modules/chat/chat.py @@ -0,0 +1,93 @@ +import commune as c +import os +import json + +class Chat(c.Module): + description = "This module is used to chat with an AI assistant" + def __init__(self, + max_tokens=420000, + prompt = 'The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly.', + model = None, + **kwargs): + + self.max_tokens = max_tokens + self.prompt = prompt + self.model = c.module('model.openrouter')(model=model, **kwargs) + + def generate(self, text = 'whats 2+2?' 
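
`Builder.build` instructs the model to wrap every generated file between `<OUTPUT(path/to/file)>` anchors, and `process_output` splits the response on those anchors and writes each block to disk. A self-contained sketch of that parsing step, assuming the closing tag mirrors the opening form (i.e. `</OUTPUT(path/to/file)>`); the helper name and demo paths are made up:

```python
# Sketch: extract <OUTPUT(path)> ... </OUTPUT(path)> blocks and write them to disk.
import os
import re

def write_anchored_files(response: str, root: str, anchor: str = 'OUTPUT') -> list:
    pattern = re.compile(rf'<{anchor}\((.*?)\)>(.*?)</{anchor}\(\1\)>', re.DOTALL)
    written = []
    for path, content in pattern.findall(response):
        full_path = os.path.join(root, path)
        os.makedirs(os.path.dirname(full_path) or root, exist_ok=True)
        with open(full_path, 'w') as f:
            f.write(content.strip())
        written.append(full_path)
    return written

demo = "<OUTPUT(app/main.py)>print('hi')</OUTPUT(app/main.py)>"
print(write_anchored_files(demo, root='./build_demo'))   # ['./build_demo/app/main.py']
```
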
, model= 'anthropic/claude-3.5-sonnet', temperature= 0.5, max_tokens= 1000000,stream=True, ): + text = self.process_text(text) + return self.model.generate(text, stream=stream, model=model, max_tokens=max_tokens,temperature=temperature ) + + forward = generate + + def ask(self, *text, **kwargs): + text = ' '.join(list(map(str, text))) + return self.generate(text, **kwargs) + + def process_text(self, text, threshold=1000): + new_text = '' + for word in text.split(' '): + conditions = { + "file": any([word.startswith(ch) for ch in ['.', '~', '/']]), + "module": word.startswith('c_') and c.module_exists(word.split('_')[1]), + } + if conditions['file']: + word = c.file2text(word) + new_text += str(word) + return new_text + + def reduce(self, text, max_chars=10000 , timeout=5, max_age=30, model='openai/o1-mini'): + + if os.path.exists(text): + text = str(c.file2text(text)) + elif c.module_exists(text): + text = c.code(text) + + original_length = len(text) + code_hash = c.hash(text) + path = f'summary/{code_hash}' + + text = f''' + GOAL + summarize the following into tupples and make sure you compress as much as oyu can + CONTEXT + {text} + OUTPUT FORMAT ONLY BETWEEN THE TAGS SO WE CAN PARSE + DICT(data=List[Dict[str, str]]) + ''' + print(f"TEXTSIZE : {len(text)}") + compress_ratio = 0 + text_size = len(text) + if len(text) >= max_chars * 2 : + batch_text = [text[i:i+max_chars] for i in range(0, len(text), max_chars)] + print(f"TEXTSIZE : {text_size} > {max_chars} BATCH SIZE: {len(batch_text)}") + futures = [c.submit(self.reduce, [batch], timeout=timeout) for batch in batch_text] + text = '' + cnt = 0 + try: + n = len(batch_text) + progress = c.progress(n) + + for future in c.as_completed(futures, timeout=timeout): + text += str(future.result()) + cnt += 1 + progress.update(1) + print(f"SUMMARIZED: {cnt}/{n} COMPRESSION_RATIO: {compress_ratio}") + return text + except Exception as e: + print(e) + + final_length = len(text) + compress_ratio = final_length/original_length + result = { 'compress_ratio': compress_ratio, 'final_length': final_length, 'original_length': original_length} + print(result) + return text + if "'''" in text: + text = text.replace("'''", '"""') + + data = c.ask(text, model=model, stream=0) + return data + + def models(self): + return self.model.models() + \ No newline at end of file diff --git a/commune/modules/chat/history.py b/modules/chat/history.py similarity index 100% rename from commune/modules/chat/history.py rename to modules/chat/history.py diff --git a/commune/modules/docker/docker.py b/modules/docker/docker.py similarity index 79% rename from commune/modules/docker/docker.py rename to modules/docker/docker.py index 74b355d1a..8ad345bdf 100644 --- a/commune/modules/docker/docker.py +++ b/modules/docker/docker.py @@ -4,15 +4,13 @@ from typing import List, Dict, Union import commune as c -class Docker(c.Module): +class Docker: - @classmethod - def dockerfile(cls, path = c.repo_path): + def dockerfile(self, path = c.repo_path): path = [f for f in c.ls(path) if f.endswith('Dockerfile')][0] return c.get_text(path) - @classmethod - def resolve_repo_path(cls, path): + def resolve_repo_path(self, path): if path is None: path = c.repo_path else: @@ -22,23 +20,22 @@ def resolve_repo_path(cls, path): path = os.path.abspath(path) return path - @classmethod - def resolve_docker_compose_path(cls,path = None): - path = cls.resolve_repo_path(path) + def resolve_docker_compose_path(self,path = None): + path = self.resolve_repo_path(path) return [f for f in c.ls(path) if 
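
`Chat.reduce` above handles oversized inputs by slicing the text into `max_chars` batches, summarizing each batch (in parallel via `c.submit` / `c.as_completed`), and folding the partial summaries. A simplified, model-free sketch of that pattern; the stand-in summarizer below replaces the real model call:

```python
# Simplified chunk-and-fold summarization, as used by Chat.reduce (sequential for clarity).
def chunk(text: str, max_chars: int = 10_000) -> list:
    return [text[i:i + max_chars] for i in range(0, len(text), max_chars)]

def reduce(text: str, summarize, max_chars: int = 10_000) -> str:
    if len(text) < 2 * max_chars:
        return summarize(text)                                # small enough: one call
    parts = [reduce(p, summarize, max_chars) for p in chunk(text, max_chars)]
    return reduce(''.join(parts), summarize, max_chars)       # fold the partial summaries

fake_summarize = lambda t: t[:50]                             # stand-in that always shrinks its input
print(len(reduce('x' * 100_000, fake_summarize)))             # 50
```
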
'docker-compose' in os.path.basename(f)][0] - @classmethod - def docker_compose(cls, path = c.repo_path): - docker_compose_path = cls.resolve_docker_compose_path(path) + + def docker_compose(self, path = c.repo_path): + docker_compose_path = self.resolve_docker_compose_path(path) return c.load_yanl(docker_compose_path) - @classmethod - def resolve_docker_path(cls, path = None): - path = cls.resolve_repo_path(path) + + def resolve_docker_path(self, path = None): + path = self.resolve_repo_path(path) return [f for f in c.ls(path) if 'Dockerfile' in os.path.basename(f)][0] - @classmethod - def build(cls, path = None , tag = None , sudo=False, verbose=True, no_cache=False, env={}): + + def build(self, path = None , tag = None , sudo=False, verbose=True, no_cache=False, env={}): path = c.resolve_path(path) if tag is None: @@ -48,40 +45,40 @@ def build(cls, path = None , tag = None , sudo=False, verbose=True, no_cache=Fal if no_cache: cmd += ' --no-cache' return c.cmd(cmd, sudo=sudo, env=env,cwd=os.path.dirname(path), verbose=verbose) - @classmethod - def kill(cls, name, sudo=False, verbose=True, prune=False): + + def kill(self, name, sudo=False, verbose=True, prune=False): c.cmd(f'docker kill {name}', sudo=sudo, verbose=verbose) c.cmd(f'docker rm {name}', sudo=sudo, verbose=verbose) if prune: c.cmd('docker container prune', sudo=sudo, verbose=verbose) return {'status': 'killed', 'name': name} - @classmethod - def kill_many(cls, name, sudo=False, verbose=True): - servers = cls.ps(name) + + def kill_many(self, name, sudo=False, verbose=True): + servers = self.ps(name) for server in servers: - cls.kill(server, sudo=sudo, verbose=verbose) + self.kill(server, sudo=sudo, verbose=verbose) c.print(f'killed {server}', verbose=verbose) return {'status': 'killed', 'name': name} - @classmethod - def kill_all(cls, sudo=False, verbose=True): - servers = cls.ps() + + def kill_all(self, sudo=False, verbose=True): + servers = self.ps() for server in servers: - cls.kill(server, sudo=sudo, verbose=verbose) + self.kill(server, sudo=sudo, verbose=verbose) c.print(f'killed {server}', verbose=verbose) return {'status': 'killed'} - @classmethod - def rm(cls, name, sudo=False, verbose=True): + + def rm(self, name, sudo=False, verbose=True): c.cmd(f'docker rm {name}', sudo=sudo, verbose=verbose) return {'status': 'removed', 'name': name} - @classmethod - def exists(cls, name:str): - return name in cls.ps() + + def exists(self, name:str): + return name in self.ps() - @classmethod - def rm_sudo(cls, sudo:bool=True, verbose:bool=True): + + def rm_sudo(self, sudo:bool=True, verbose:bool=True): ''' To remove the requirement for sudo when using Docker, you can configure Docker to run without superuser privileges. 
Here's how you can do it: Create a Docker group (if it doesn't exist) and add your user to that group: @@ -99,12 +96,12 @@ def rm_sudo(cls, sudo:bool=True, verbose:bool=True): - @classmethod - def containers(cls, sudo:bool = False): - return [container['name'] for container in cls.ps(sudo=sudo)] - @classmethod - def chmod_scripts(cls): + def containers(self, sudo:bool = False): + return [container['name'] for container in self.ps(sudo=sudo)] + + + def chmod_scripts(self): c.cmd(f'bash -c "chmod +x {c.libpath}/scripts/*"', verbose=True) @@ -118,14 +115,14 @@ def install(self): c.cmd('./scripts/install_docker.sh', cwd=c.libpath, verbose=True,bash=True) - @classmethod - def install_docker_compose(cls, sudo=False): + + def install_docker_compose(self, sudo=False): return c.cmd('apt install docker-compose', verbose=True, sudo=True) # def build_commune(self, sudo=False): # self.build(path=self.libpath, sudo=sudo) - @classmethod - def images(cls, to_records=True): + + def images(self, to_records=True): text = c.cmd('docker images', verbose=False) df = [] cols = [] @@ -159,18 +156,18 @@ def rm_images(self, search:List[str]=None): return {'success': True, 'responses': responses } - @classmethod - def image2id(cls, image=None): + + def image2id(self, image=None): image2id = {} - df = cls.images() + df = self.images() for i in range(len(df)): image2id[df['REPOSITORY'][i]] = df['IMAGE_ID'][i] if image != None: id = image2id[image] return id - @classmethod - def deploy(cls, + + def deploy(self, image : str, cmd : str = 'ls', volumes:List[str] = None, @@ -196,7 +193,7 @@ def deploy(cls, docker_cmd += f' --net {net} ' if build: - cls.build(image, tag=name) + self.build(image, tag=name) if daemon: docker_cmd += ' -d ' @@ -242,8 +239,8 @@ def deploy(cls, # text_output = c.cmd(docker_cmd, verbose=True) # self.update() - @classmethod - def psdf(cls, load=True, save=False, idx_key ='container_id'): + + def psdf(self, load=True, save=False, idx_key ='container_id'): output_text = c.cmd('docker ps', verbose=False) rows = [] @@ -265,10 +262,10 @@ def psdf(cls, load=True, save=False, idx_key ='container_id'): df.set_index(idx_key, inplace=True) return df - @classmethod - def ps(cls, search = None, df:bool = False): + + def ps(self, search = None, df:bool = False): - psdf = cls.psdf() + psdf = self.psdf() paths = psdf['names'].tolist() if search != None: paths = [p for p in paths if p != None and search in p] @@ -277,18 +274,18 @@ def ps(cls, search = None, df:bool = False): paths = sorted(paths) return paths - @classmethod - def name2dockerfile(cls, path = None): - return {l.split('/')[-2] if len(l.split('/'))>1 else c.lib:l for l in cls.dockerfiles(path)} - @classmethod - def resolve_dockerfile(cls, name): + def name2dockerfile(self, path = None): + return {l.split('/')[-2] if len(l.split('/'))>1 else c.lib:l for l in self.dockerfiles(path)} + + + def resolve_dockerfile(self, name): if name == None: name = 'commune' if c.exists(name): return name - name2dockerfile = cls.name2dockerfile() + name2dockerfile = self.name2dockerfile() if name in name2dockerfile: return name2dockerfile[name] else: @@ -296,40 +293,40 @@ def resolve_dockerfile(cls, name): get_dockerfile = resolve_dockerfile - @classmethod - def compose_paths(cls, path = None): + + def compose_paths(self, path = None): if path is None: path = c.libpath + '/' return [l for l in c.walk(path) if l.endswith('docker-compose.yaml') or l.endswith('docker-compose.yml')] - @classmethod - def name2compose(cls, path=None): - compose_paths = cls.compose_paths(path) 
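
The `rm_sudo` docstring above describes the usual way to drop the `sudo` requirement: create a `docker` group and add your user to it. A hedged sketch of those steps driven through `c.cmd` (assumes a Debian/Ubuntu-style host; the helper name is illustrative, and you still need to log out and back in, or run `newgrp docker`, for the group change to apply):

```python
# Sketch: the docker-group setup described in the rm_sudo docstring.
import commune as c

def docker_without_sudo(user: str) -> dict:
    c.cmd('groupadd -f docker', sudo=True, verbose=True)          # create the group if missing
    c.cmd(f'usermod -aG docker {user}', sudo=True, verbose=True)  # add the user to it
    return {'success': True, 'note': 're-login (or run `newgrp docker`) to pick up the group'}
```
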
+ + def name2compose(self, path=None): + compose_paths = self.compose_paths(path) return {l.split('/')[-2] if len(l.split('/'))>1 else c.lib:l for l in compose_paths} - @classmethod - def get_compose_path(cls, path:str): - path = cls.name2compose().get(path, path) + + def get_compose_path(self, path:str): + path = self.name2compose().get(path, path) return path - @classmethod - def get_compose(cls, path:str): - path = cls.get_compose_path(path) + + def get_compose(self, path:str): + path = self.get_compose_path(path) return c.load_yaml(path) - @classmethod - def put_compose(cls, path:str, compose:dict): - path = cls.get_compose_path(path) + + def put_compose(self, path:str, compose:dict): + path = self.get_compose_path(path) return c.save_yaml(path, compose) - # @classmethod - # def down(cls, path='frontend'): - # path = cls.get_compose_path(path) + # + # def down(self, path='frontend'): + # path = self.get_compose_path(path) # return c.cmd('docker-compose -f {path} down', verbose=True) - @classmethod - def compose(cls, + + def compose(self, path: str, compose: Union[str, dict, None] = None, daemon:bool = True, @@ -345,12 +342,12 @@ def compose(cls, cmd = f'docker-compose' if dash else f'docker compose' - path = cls.get_compose_path(path) + path = self.get_compose_path(path) if compose == None: - compose = cls.get_compose(path) + compose = self.get_compose(path) if isinstance(path, str): - compose = cls.get_compose(path) + compose = self.get_compose(path) if project_name != None: @@ -385,12 +382,12 @@ def compose(cls, text_output = c.cmd(cmd, verbose=True) c.rm(tmp_path) - @classmethod + def rm_container(self, name): c.cmd(f'docker rm -f {name}', verbose=True) - @classmethod - def logs(cls, name, sudo=False, follow=False, verbose=False, tail:int=2): + + def logs(self, name, sudo=False, follow=False, verbose=False, tail:int=2): cmd = f'docker logs {name} {"-f" if follow else ""} --tail {tail}' return c.cmd(cmd, verbose=verbose) @@ -398,20 +395,20 @@ def log_map(self, search=None): nodes = self.ps(search=search) return {name: self.logs(name) for name in nodes} - @classmethod - def tag(cls, image:str, tag:str): + + def tag(self, image:str, tag:str): c.cmd(f'docker tag {image} {tag}', verbose=True) c.cmd(f'docker push {tag}', verbose=True) - @classmethod + def login(self, username:str, password:str): c.cmd(f'docker login -u {username} -p {password}', verbose=True) - @classmethod + def logout(self, image:str): c.cmd(f'docker logout {image}', verbose=True) - @classmethod - def dockerfiles(cls, path = None): + + def dockerfiles(self, path = None): if path is None: path = c.libpath + '/' dockerfiles = [] @@ -428,9 +425,9 @@ def name2dockerfile(self, path = None): return {l.split('/')[-2] if len(l.split('/'))>1 else c.lib:l for l in self.dockerfiles(path)} - @classmethod - def dashboard(cls): - self = cls() + + def dashboard(self): + self = self() import streamlit as st containers = self.psdf() name2dockerfile = self.name2dockerfile() @@ -440,12 +437,8 @@ def dashboard(cls): dockerfile_text = c.get_text(dockerfile) st.code(dockerfile_text) - def prune(self): return c.cmd('docker container prune') - def start_docker(self): return c.cmd('systemctl start docker') - -Docker.run(__name__) \ No newline at end of file diff --git a/commune/modules/emoji/emoji.py b/modules/emoji/emoji.py similarity index 100% rename from commune/modules/emoji/emoji.py rename to modules/emoji/emoji.py diff --git a/modules/find/find.py b/modules/find/find.py new file mode 100644 index 000000000..fcbf77b9d --- /dev/null +++ 
b/modules/find/find.py
@@ -0,0 +1,73 @@
+import commune as c
+import json
+import os
+
+class Search:
+    description = "This module is used to find files and modules in the current directory"
+    def forward(self, query='', mode='modules'):
+        return getattr(self, mode)(query=query)
+
+
+    def lines(self, search:str=None, path:str='./') -> dict:
+        """
+        Finds the lines in text with search
+        """
+        # if is a directory, get all files
+        file2lines = {}
+        for file, text in c.file2text(path).items():
+            found_lines = []
+            lines = text.split('\n')
+            idx2line = {idx:line for idx, line in enumerate(lines)}
+            for idx, line in idx2line.items():
+                if search in line:
+                    found_lines.append((idx, line))
+            file2lines[file] = found_lines
+        return file2lines
+
+    def query(self, options,
+              query='most relevant modules',
+              output_format="DICT(data:list[[key:str, score:float]])",
+              path='./',
+              anchor = 'OUTPUT',
+              n=10,
+              model='sonnet'):
+
+        front_anchor = f"<{anchor}>"
+        back_anchor = f"</{anchor}>"
+        print(f"Querying {query} with options {options}")
+        prompt = f"""
+        QUERY
+        {query}
+        OPTIONS
+        {options}
+        INSTRUCTION
+        get the top {n} functions that match the query
+        OUTPUT
+        (JSON ONLY AND ONLY RESPOND WITH THE FOLLOWING INCLUDING THE ANCHORS SO WE CAN PARSE)
+        {front_anchor}{output_format}{back_anchor}
+        """
+        output = ''
+        for ch in c.ask(prompt, model=model):
+            print(ch, end='')
+            output += ch
+            if ch == front_anchor:
+                break
+        if '```json' in output:
+            output = output.split('```json')[1].split('```')[0]
+        elif front_anchor in output:
+            output = output.split(front_anchor)[1].split(back_anchor)[0]
+        else:
+            output = output
+        output = json.loads(output)
+        assert len(output) > 0
+        return output
+
+    def files(self, query='the file that is the core of commune', path='./', model='sonnet' ):
+        return self.query(options=c.files(path), query=query)
+
+    def modules(self, query='the file that is the core of commune', model='sonnet'):
+        return self.query(options=c.modules(), query=query)
+
+    def utils(self, query='confuse the gradients'):
+        return self.query(query=query, options=c.get_utils())
diff --git a/commune/modules/git/git.py b/modules/git/git.py
similarity index 100%
rename from commune/modules/git/git.py
rename to modules/git/git.py
diff --git a/modules/marketplace/app/page.tsx b/modules/marketplace/app/page.tsx
new file mode 100644
index 000000000..8f4b529e5
--- /dev/null
+++ b/modules/marketplace/app/page.tsx
@@ -0,0 +1,40 @@
+
+```tsx
+'use client';
+import { useState } from 'react';
+import Editor from '@/components/Editor';
+import FileExplorer from '@/components/FileExplorer';
+import ThemePicker from '@/components/ThemePicker';
+import Marketplace from '@/components/Marketplace';
+
+export default function Home() {
+  const [currentFile, setCurrentFile] = useState('');
+  const [files, setFiles] = useState<{[key: string]: string}>({});
+
+  return (
+
+ +
+ { + setFiles(prev => ({ + ...prev, + [currentFile]: content + })); + }} + /> +
+
+ ); +} +``` diff --git a/modules/marketplace/components/Editor.tsx b/modules/marketplace/components/Editor.tsx new file mode 100644 index 000000000..5cb05fdba --- /dev/null +++ b/modules/marketplace/components/Editor.tsx @@ -0,0 +1,25 @@ + +```tsx +'use client'; +import { useState } from 'react'; +import CodeMirror from '@uiw/react-codemirror'; +import { javascript } from '@codemirror/lang-javascript'; +import { dracula } from '@uiw/codemirror-theme-dracula'; + +interface EditorProps { + content: string; + onChange: (value: string) => void; +} + +export default function Editor({ content, onChange }: EditorProps) { + return ( + + ); +} +``` diff --git a/modules/marketplace/components/FileExplorer.tsx b/modules/marketplace/components/FileExplorer.tsx new file mode 100644 index 000000000..1d6f3be9d --- /dev/null +++ b/modules/marketplace/components/FileExplorer.tsx @@ -0,0 +1,54 @@ + +```tsx +interface FileExplorerProps { + files: {[key: string]: string}; + setFiles: (files: {[key: string]: string}) => void; + currentFile: string; + setCurrentFile: (file: string) => void; +} + +export default function FileExplorer({ + files, + setFiles, + currentFile, + setCurrentFile +}: FileExplorerProps) { + const createNewFile = () => { + const filename = prompt('Enter file name:'); + if (filename) { + setFiles(prev => ({ + ...prev, + [filename]: '' + })); + setCurrentFile(filename); + } + }; + + return ( +
+
+

Files

+ +
+
    + {Object.keys(files).map(filename => ( +
  • setCurrentFile(filename)} + > + {filename} +
  • + ))} +
+
+ ); +} +``` diff --git a/modules/marketplace/components/Marketplace.tsx b/modules/marketplace/components/Marketplace.tsx new file mode 100644 index 000000000..9bc930264 --- /dev/null +++ b/modules/marketplace/components/Marketplace.tsx @@ -0,0 +1,45 @@ + +```tsx +'use client'; +import { useState, useEffect } from 'react'; + +interface Repo { + id: number; + name: string; + description: string; + html_url: string; +} + +export default function Marketplace() { + const [repos, setRepos] = useState([]); + + useEffect(() => { + // Example: Fetch trending repos from GitHub + fetch('https://api.github.com/search/repositories?q=stars:>1&sort=stars') + .then(res => res.json()) + .then(data => setRepos(data.items.slice(0, 5))); + }, []); + + return ( +
+

Marketplace

+
+ {repos.map(repo => ( +
+

{repo.name}

+

{repo.description}

+ + View Repository + +
+ ))} +
+
+ ); +} +``` diff --git a/modules/marketplace/components/ThemePicker.tsx b/modules/marketplace/components/ThemePicker.tsx new file mode 100644 index 000000000..0275ef0b4 --- /dev/null +++ b/modules/marketplace/components/ThemePicker.tsx @@ -0,0 +1,32 @@ + +```tsx +'use client'; +import { useState } from 'react'; + +const themes = [ + { name: 'Dracula', value: 'dracula' }, + { name: 'GitHub', value: 'github' }, + { name: 'VS Code Dark', value: 'vscode-dark' }, +]; + +export default function ThemePicker() { + const [currentTheme, setCurrentTheme] = useState('dracula'); + + return ( +
+

Themes

+ +
+ ); +} +``` diff --git a/modules/marketplace/package.json b/modules/marketplace/package.json new file mode 100644 index 000000000..19e3f5f66 --- /dev/null +++ b/modules/marketplace/package.json @@ -0,0 +1,14 @@ + +```json +{ + "dependencies": { + "@codemirror/lang-javascript": "^6.0.0", + "@uiw/codemirror-theme-dracula": "^4.19.9", + "@uiw/react-codemirror": "^4.19.9", + "next": "13.4.19", + "react": "18.2.0", + "react-dom": "18.2.0", + "tailwindcss": "^3.3.3" + } +} +``` diff --git a/modules/memory/memory.py b/modules/memory/memory.py new file mode 100644 index 000000000..16e9ed7b9 --- /dev/null +++ b/modules/memory/memory.py @@ -0,0 +1,9 @@ +class Memory: + def __init__(self): + self.memory = {} + + def store(self, address, value): + self.memory[address] = value + + def load(self, address): + return self.memory.get(address, 0) \ No newline at end of file diff --git a/commune/modules/miner/miner.py b/modules/miner/miner.py similarity index 100% rename from commune/modules/miner/miner.py rename to modules/miner/miner.py diff --git a/commune/modules/model/__init__.py b/modules/model/__init__.py similarity index 100% rename from commune/modules/model/__init__.py rename to modules/model/__init__.py diff --git a/commune/modules/model/einsum/einsum.py b/modules/model/einsum/einsum.py similarity index 100% rename from commune/modules/model/einsum/einsum.py rename to modules/model/einsum/einsum.py diff --git a/commune/modules/model/model.py b/modules/model/model.py similarity index 100% rename from commune/modules/model/model.py rename to modules/model/model.py diff --git a/commune/modules/model/openai.py b/modules/model/openai.py similarity index 100% rename from commune/modules/model/openai.py rename to modules/model/openai.py diff --git a/commune/modules/model/openrouter.py b/modules/model/openrouter.py similarity index 91% rename from commune/modules/model/openrouter.py rename to modules/model/openrouter.py index 11d99ec19..bb1ace2fd 100644 --- a/commune/modules/model/openrouter.py +++ b/modules/model/openrouter.py @@ -25,10 +25,7 @@ def __init__( max_retries (int | None, optional): The maximum number of retries for the client. Defaults to None. """ - super().__init__() self.prompt = None - if api_key == None: - api_key = self.get_api_key() self.authenticate( api_key=api_key, @@ -69,20 +66,13 @@ def generate( prompt = prompt or self.prompt message = message + prompt if prompt else message model = self.resolve_model(model) - print(f'Generating with model: {model}') model_info = self.get_model_info(model) num_tokens = len(message) print(f'Sending {num_tokens} tokens -> {model}') max_tokens = min(max_tokens, model_info['context_length'] - num_tokens) messages = history.copy() messages.append({"role": "user", "content": message}) - result = self.client.chat.completions.create( - model=model, - messages=messages, - stream= bool(stream), - max_tokens = max_tokens, - temperature= temperature, - ) + result = self.client.chat.completions.create(model=model, messages=messages, stream= bool(stream), max_tokens = max_tokens, temperature= temperature ) if stream: @@ -111,7 +101,7 @@ def resolve_model(self, model=None): def authenticate( self, - api_key: str, + api_key: str = None, base_url: None = None, timeout: float | None = None, max_retries: int = 5, @@ -125,6 +115,8 @@ def authenticate( max_retries (int, optional): The maximum number of retries for the client. Defaults to 0. 
""" + if api_key == None: + api_key = self.get_api_key() self.client = openai.OpenAI( base_url=base_url, api_key=api_key, @@ -174,4 +166,5 @@ def filter_models(cls, models, search:str = None): def pricing(self, search: str = None , **kwargs): pricing = [{'name': k , **v['pricing']} for k,v in self.model2info(search=search, **kwargs).items()] - return c.df(pricing).sort_values('completion', ascending=False) \ No newline at end of file + return c.df(pricing).sort_values('completion', ascending=False) + diff --git a/commune/modules/model/pool.py b/modules/model/pool.py similarity index 91% rename from commune/modules/model/pool.py rename to modules/model/pool.py index 65993cca3..201cf8403 100644 --- a/commune/modules/model/pool.py +++ b/modules/model/pool.py @@ -7,7 +7,6 @@ def __init__(self, module='chat', network='local'): self.network = network def generate(self, *args, **kwargs): - c.print('Generating model') models = c.servers(self.module) client = c.choice(models) client = c.connect(client) diff --git a/commune/modules/model/sentence.py b/modules/model/sentence.py similarity index 100% rename from commune/modules/model/sentence.py rename to modules/model/sentence.py diff --git a/commune/modules/model/transformers/transformer.py b/modules/model/transformers/transformer.py similarity index 100% rename from commune/modules/model/transformers/transformer.py rename to modules/model/transformers/transformer.py diff --git a/commune/modules/model/utils.py b/modules/model/utils.py similarity index 100% rename from commune/modules/model/utils.py rename to modules/model/utils.py diff --git a/modules/openrouter/openrouter.py b/modules/openrouter/openrouter.py new file mode 100644 index 000000000..47d2def42 --- /dev/null +++ b/modules/openrouter/openrouter.py @@ -0,0 +1,148 @@ +from typing import Generator +import requests +import json +import openai +import commune as c + +class OpenRouter(c.Module): + + def __init__(self, api_key = None, base_url: str | None = 'https://openrouter.ai/api/v1', timeout: float | None = None, max_retries: int = 10, **kwargs): + """ + Initialize the OpenAI with the specified model, API key, timeout, and max retries. + + Args: + model (OPENAI_MODES): The OpenAI model to use. + api_key (API_KEY): The API key for authentication. + base_url (str | None, optional): can be used for openrouter api calls + timeout (float | None, optional): The timeout value for the client. Defaults to None. + max_retries (int | None, optional): The maximum number of retries for the client. Defaults to None. + """ + self.authenticate( api_key=api_key,base_url=base_url, timeout=timeout, max_retries=max_retries, ) + + def generate( + self, + message: str, + *extra_text , + history = None, + prompt: str = None, + system_prompt: str = None, + stream: bool = False, + model:str = 'claude-3-sonnet', + max_tokens: int = 100000, + temperature: float = 1.0, + ) -> str | Generator[str, None, None]: + """ + Generates a response using the OpenAI language model. + + Args: + message (str): The message to send to the language model. + history (ChatHistory): The conversation history. + stream (bool): Whether to stream the response or not. + max_tokens (int): The maximum number of tokens to generate. + temperature (float): The sampling temperature to use. + + Returns: + Generator[str] | str: A generator for streaming responses or the full streamed response. 
+ """ + prompt = prompt or system_prompt + if len(extra_text) > 0: + message = message + ' '.join(extra_text) + history = history or [] + message = message + prompt if prompt else message + model = self.resolve_model(model) + model_info = self.get_model_info(model) + num_tokens = len(message) + print(f'Sending {num_tokens} tokens -> {model}') + max_tokens = min(max_tokens, model_info['context_length'] - num_tokens) + messages = history.copy() + messages.append({"role": "user", "content": message}) + result = self.client.chat.completions.create(model=model, messages=messages, stream= bool(stream), max_tokens = max_tokens, temperature= temperature ) + + + if stream: + def stream_generator( result): + for token in result: + yield token.choices[0].delta.content + return stream_generator(result) + else: + return result.choices[0].message.content + + forward = generate + + def resolve_model(self, model=None): + models = self.models() + if str(model) not in models: + if ',' in model: + models = [m for m in models if any([s in m for s in model.split(',')])] + else: + models = [m for m in models if str(model) in m] + print(f"Model {model} not found. Using {models} instead.") + assert len(models) > 0 + model = models[0] + + return model + + def authenticate( + self, + api_key: str = None, + base_url: None = None, + timeout: float | None = None, + max_retries: int = 5, + ) -> 'OpenAI': + """ + Authenticate the client with the provided API key, timeout, and max retries. + + Args: + api_key (str): The API key for authentication. + timeout (float | None, optional): The timeout value for the client. Defaults to None. + max_retries (int, optional): The maximum number of retries for the client. Defaults to 0. + + """ + self.client = openai.OpenAI( + base_url=base_url, + api_key=api_key or self.get_api_key(), + timeout=timeout, + max_retries=max_retries, + ) + return {"status": "success", "base_url": base_url} + + def model2info(self, search: str = None, path='models', max_age=100, update=False): + models = self.get(path, default={}, max_age=max_age, update=update) + if len(models) == 0: + print('Updating models...') + response = requests.get(self.url) + models = json.loads(response.text)['data'] + self.put(path, models) + + models = self.filter_models(models, search=search) + models = {m['id']:m for m in models} + return models + + def models(self, search: str = None, path='models', max_age=0, update=False): + return list(self.model2info(search=search, path=path, max_age=max_age, update=update).keys()) + + def model_infos(self, search: str = None, path='models', max_age=0, update=False): + return list(self.model2info(search=search, path=path, max_age=max_age, update=update).values()) + + def get_model_info(self, model): + model = self.resolve_model(model) + model2info = self.model2info() + return model2info[model] + + @classmethod + def filter_models(cls, models, search:str = None): + if search == None: + return models + if isinstance(models[0], str): + models = [{'id': m} for m in models] + if ',' in search: + search = [s.strip() for s in search.split(',')] + else: + search = [search] + models = [m for m in models if any([s in m['id'] for s in search])] + return [m for m in models] + + def pricing(self, search: str = None , **kwargs): + pricing = [{'name': k , **v['pricing']} for k,v in self.model2info(search=search, **kwargs).items()] + return c.df(pricing).sort_values('completion', ascending=False) + diff --git a/commune/modules/plotly/plotly.py b/modules/plotly/plotly.py similarity index 100% rename 
diff --git a/commune/modules/plotly/plotly.py b/modules/plotly/plotly.py
similarity index 100%
rename from commune/modules/plotly/plotly.py
rename to modules/plotly/plotly.py
diff --git a/commune/modules/py/py.py b/modules/py/py.py
similarity index 100%
rename from commune/modules/py/py.py
rename to modules/py/py.py
diff --git a/modules/reduce/reduce.py b/modules/reduce/reduce.py
new file mode 100644
index 000000000..fd40910d9
--- /dev/null
+++ b/modules/reduce/reduce.py
@@ -0,0 +1,4 @@
+
+
+
+
diff --git a/commune/modules/remote/README.md b/modules/remote/README.md
similarity index 100%
rename from commune/modules/remote/README.md
rename to modules/remote/README.md
diff --git a/commune/modules/remote/app.py b/modules/remote/app.py
similarity index 100%
rename from commune/modules/remote/app.py
rename to modules/remote/app.py
diff --git a/commune/modules/remote/remote.py b/modules/remote/remote.py
similarity index 100%
rename from commune/modules/remote/remote.py
rename to modules/remote/remote.py
diff --git a/commune/modules/remote/ssh.py b/modules/remote/ssh.py
similarity index 100%
rename from commune/modules/remote/ssh.py
rename to modules/remote/ssh.py
diff --git a/commune/modules/repo/repo.py b/modules/repo/repo.py
similarity index 100%
rename from commune/modules/repo/repo.py
rename to modules/repo/repo.py
diff --git a/modules/sandbox.py b/modules/sandbox.py
new file mode 100644
index 000000000..61d9117bd
--- /dev/null
+++ b/modules/sandbox.py
@@ -0,0 +1,9 @@
+import commune as c
+
+data = {
+    'data': '{"data": "fam laoniu CDL mining", "time": 1732752000}',
+    'crypto_type': 1,
+    'signature': 'd25da45e666449f8797786e2d86ba4758c393985bf8af0dd2cc055c21a38cb30b46114b0514748e0ba41ef05e5c99cb193eb5a239f15dbc684656f1d0cc14280',
+    'address': '5DX4ytqpzDQmEfD9mxq5Gs7FaNnEpCiJh6s1qJkJzahGx8LC'
+}
+print(c.verify(data))
\ No newline at end of file
diff --git a/commune/modules/scan/scan.py b/modules/scan/scan.py
similarity index 100%
rename from commune/modules/scan/scan.py
rename to modules/scan/scan.py
diff --git a/commune/modules/screen/requirements.txt b/modules/screen/requirements.txt
similarity index 100%
rename from commune/modules/screen/requirements.txt
rename to modules/screen/requirements.txt
diff --git a/commune/modules/screen/screen.py b/modules/screen/screen.py
similarity index 100%
rename from commune/modules/screen/screen.py
rename to modules/screen/screen.py
diff --git a/commune/modules/select/select.py b/modules/select/select.py
similarity index 92%
rename from commune/modules/select/select.py
rename to modules/select/select.py
index b38a6bd5b..034ff64bc 100644
--- a/commune/modules/select/select.py
+++ b/modules/select/select.py
@@ -33,6 +33,6 @@ def forward(self,
             output += ch
         output = output.split('')[1].split('')[0]
         output = json.loads(output)["data"]
-        output_schema = {k:c.fn_schema(module + '/'+k) for k,v in output}
+        output_schema = {k:c.schema(module + '/'+k) for k,v in output}
         return output_schema
diff --git a/commune/modules/selenium/selenium.py b/modules/selenium/selenium.py
similarity index 100%
rename from commune/modules/selenium/selenium.py
rename to modules/selenium/selenium.py
diff --git a/commune/modules/serializer/bytes.py b/modules/serializer/bytes.py
similarity index 100%
rename from commune/modules/serializer/bytes.py
rename to modules/serializer/bytes.py
diff --git a/commune/modules/serializer/munch.py b/modules/serializer/munch.py
similarity index 100%
rename from commune/modules/serializer/munch.py
rename to modules/serializer/munch.py
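The modules/sandbox.py file above pins a concrete signed payload and prints `c.verify(data)`. A small negative-path sketch of the same check, assuming `c.verify` returns a boolean rather than raising when the signature does not match (only the positive call appears in the patch):

```python
# a tamper-check sketch, not part of the patch; the boolean return of c.verify is assumed
import commune as c

data = {
    'data': '{"data": "fam laoniu CDL mining", "time": 1732752000}',
    'crypto_type': 1,
    'signature': 'd25da45e666449f8797786e2d86ba4758c393985bf8af0dd2cc055c21a38cb30b46114b0514748e0ba41ef05e5c99cb193eb5a239f15dbc684656f1d0cc14280',
    'address': '5DX4ytqpzDQmEfD9mxq5Gs7FaNnEpCiJh6s1qJkJzahGx8LC'
}

tampered = dict(data)
tampered['data'] = tampered['data'].replace('mining', 'minting')  # mutate the signed message

print(c.verify(data))       # expected True for the untouched payload
print(c.verify(tampered))   # expected False: the signature no longer covers the new bytes
```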
diff --git a/commune/modules/serializer/numpy.py b/modules/serializer/numpy.py
similarity index 100%
rename from commune/modules/serializer/numpy.py
rename to modules/serializer/numpy.py
diff --git a/commune/modules/serializer/pandas.py b/modules/serializer/pandas.py
similarity index 100%
rename from commune/modules/serializer/pandas.py
rename to modules/serializer/pandas.py
diff --git a/commune/modules/serializer/serializer.py b/modules/serializer/serializer.py
similarity index 100%
rename from commune/modules/serializer/serializer.py
rename to modules/serializer/serializer.py
diff --git a/commune/modules/serializer/torch.py b/modules/serializer/torch.py
similarity index 100%
rename from commune/modules/serializer/torch.py
rename to modules/serializer/torch.py
diff --git a/commune/modules/storage/storage.py b/modules/storage/storage.py
similarity index 100%
rename from commune/modules/storage/storage.py
rename to modules/storage/storage.py
diff --git a/commune/modules/store/store.py b/modules/store/store.py
similarity index 100%
rename from commune/modules/store/store.py
rename to modules/store/store.py
diff --git a/commune/modules/streamlit/__init__.py b/modules/streamlit/__init__.py
similarity index 100%
rename from commune/modules/streamlit/__init__.py
rename to modules/streamlit/__init__.py
diff --git a/commune/modules/streamlit/auth/config_template.yaml b/modules/streamlit/auth/config_template.yaml
similarity index 100%
rename from commune/modules/streamlit/auth/config_template.yaml
rename to modules/streamlit/auth/config_template.yaml
diff --git a/commune/modules/streamlit/auth/streamlit_auth.py b/modules/streamlit/auth/streamlit_auth.py
similarity index 100%
rename from commune/modules/streamlit/auth/streamlit_auth.py
rename to modules/streamlit/auth/streamlit_auth.py
diff --git a/commune/modules/streamlit/plot.py b/modules/streamlit/plot.py
similarity index 100%
rename from commune/modules/streamlit/plot.py
rename to modules/streamlit/plot.py
diff --git a/commune/modules/streamlit/streamlit.py b/modules/streamlit/streamlit.py
similarity index 100%
rename from commune/modules/streamlit/streamlit.py
rename to modules/streamlit/streamlit.py
diff --git a/commune/modules/streamlit/styles/commune.css b/modules/streamlit/styles/commune.css
similarity index 100%
rename from commune/modules/streamlit/styles/commune.css
rename to modules/streamlit/styles/commune.css
diff --git a/commune/modules/streamlit/utils.py b/modules/streamlit/utils.py
similarity index 100%
rename from commune/modules/streamlit/utils.py
rename to modules/streamlit/utils.py
diff --git a/commune/modules/streamlit/watchdog/streamlit_watchdog.py b/modules/streamlit/watchdog/streamlit_watchdog.py
similarity index 100%
rename from commune/modules/streamlit/watchdog/streamlit_watchdog.py
rename to modules/streamlit/watchdog/streamlit_watchdog.py
diff --git a/commune/modules/streamlit/watchdog/streamlit_watchdog.yaml b/modules/streamlit/watchdog/streamlit_watchdog.yaml
similarity index 100%
rename from commune/modules/streamlit/watchdog/streamlit_watchdog.yaml
rename to modules/streamlit/watchdog/streamlit_watchdog.yaml
diff --git a/commune/modules/user/user.py b/modules/user/user.py
similarity index 100%
rename from commune/modules/user/user.py
rename to modules/user/user.py
diff --git a/commune/modules/web/web.py b/modules/web/web.py
similarity index 100%
rename from commune/modules/web/web.py
rename to modules/web/web.py
diff --git a/tests/test_key.py b/tests/test_key.py
index ec33bb955..27c211a51 100644
--- a/tests/test_key.py
+++ b/tests/test_key.py
@@ -1,13 +1,14 @@
 import commune as c

-def test_encryption(value = 10):
+def test_encryption(values = [10, 'fam', 'hello world']):
     cls = c.module('key')
-    value = str(value)
-    key = cls.new_key()
-    enc = key.encrypt(value)
-    dec = key.decrypt(enc)
-    assert dec == value, f'encryption failed, {dec} != {value}'
+    for value in values:
+        value = str(value)
+        key = cls.new_key()
+        enc = key.encrypt(value)
+        dec = key.decrypt(enc)
+        assert dec == value, f'encryption failed, {dec} != {value}'
     return {'encrypted':enc, 'decrypted': dec}

 def test_encryption_with_password(value = 10, password = 'fam'):
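The updated test above loops three values through a single encrypt/decrypt round trip. An equivalent parametrized form, assuming pytest is the runner for `tests/` (an assumption, not stated in the patch), reports each value as its own case:

```python
# a sketch of the same round trip as a parametrized test; pytest usage is an assumption
import pytest
import commune as c

@pytest.mark.parametrize('value', [10, 'fam', 'hello world'])
def test_encryption_roundtrip(value):
    key = c.module('key').new_key()          # fresh key per case, as in the loop above
    enc = key.encrypt(str(value))
    assert key.decrypt(enc) == str(value), f'encryption failed for {value!r}'
```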