this repo has no description
0
fork

Configure Feed

Select the types of activity you want to include in your feed.

first commit

JoJoJux ee3d4749

+2425
+1
.gitignore
··· 1 + /.venv
+62
README.md
··· 1 + # `atplex` 2 + 3 + Name open for discussion; I just needed one but didn't want to think, so I took something where you don't need to think much. 4 + 5 + ## Idea 6 + 7 + A low-level python package for interacting with atproto data, 8 + enabling you to use typed python to communicate with XRPC endpoints 9 + using custom lexicons found on the web. 10 + 11 + ## What I want it to look like in the end 12 + 13 + something like this 14 + (except I don't know yet if I want to make it sync or async) 15 + 16 + ```bash 17 + python -m atplex pull com.example.getProfile 18 + ``` 19 + 20 + ```python 21 + import atplex 22 + import lex 23 + 24 + client = atplex.Client() 25 + 26 + data = await client.exec(lex.com.example.getProfile(user="jojojux.de")) 27 + print(data) # com.example.getProfile#main/output(did=DID("did:plc:f3f3dvty36ztjdqqyxfqhw3p"), name="jojojux.de", displayName=None) 28 + ``` 29 + 30 + ## Roadmap 31 + 32 + - [ ] Implement lexicon string formats as custom types 33 + - [x] NSID 34 + - [x] TID 35 + - [x] DID 36 + - [ ] CID 37 + - [ ] All the others 38 + - [x] Object based representation of lexicons 39 + - [x] Read lexicons into correct object representation 40 + - [x] Codegen from lexicon object representation 41 + - [x] `None` (partial, but won't-fix) (specifically: cannot be top-level type of a fragment) 42 + - [x] `bool`, `int`, `str`, `bytes` 43 + - [ ] `object` (partial) 44 + - [ ] `list` (partial) 45 + - [ ] `query` 46 + - [ ] `procedure` 47 + - [ ] `params` 48 + - [ ] `record` 49 + - [ ] `subscription` 50 + - [ ] Cache lexicon object representation for codegen-ed lexicons 51 + - [ ] Validate data against a lexicon object representation 52 + - [ ] Read data from known lexicons 53 + - [ ] Read data from unknown lexicons 54 + Decide: 55 + - Generate lexicon object representation on the fly? 56 + - Auto resolve lexicons? 
57 + - [ ] CLI Tool to resolve & pull lexicons, do codegen, and more 58 + Idea is essentially something like a package manager for lexicons 59 + - [ ] Basic XRPC Client 60 + - [ ] Figure out how to best store data that has to conform to the data model without all these fancy-custom types that lexicons have. 61 + - [ ] Improve all of the above 62 + - [ ] I don't know what
+2
lex/__init__.py
··· 1 + from . import com 2 + __all__ = ['com']
+2
lex/com/__init__.py
··· 1 + from . import example 2 + __all__ = ['example']
+2
lex/com/example/__init__.py
··· 1 + from .getProfile import getProfile 2 + __all__ = ['getProfile']
+36
lex/com/example/getProfile.py
"""Generated code for the ``com.example.getProfile`` lexicon.

Each ``_frag_*`` class is one fragment of the lexicon; its ``_meta_*``
metaclass carries the lexicon/fragment identity.
"""

from src.lexicon.model import BaseFragment, BaseExport


class _meta_other2(BaseFragment):
    __lexicon__ = "com.example.getProfile"
    # FIX: was "other", which collided with _meta_other below — both
    # fragments of the same lexicon claimed the same fragment name.
    # NOTE(review): assumes the codegen intended one fragment per name;
    # confirm against the generator in src/lexicon.
    __fragment__ = "other2"


class _frag_other2(object, metaclass=_meta_other2):
    # FIX: qualname was "other" (same copy bug as __fragment__ above).
    __qualname__ = "other2"
    # Annotation name "int" mirrors the lexicon property name; it shadows
    # the builtin only inside this class body, which is harmless here.
    int: "_frag_other"


class _meta_other(BaseFragment):
    __lexicon__ = "com.example.getProfile"
    __fragment__ = "other"


class _frag_other(object, metaclass=_meta_other):
    __qualname__ = "other"
    int: "int"


class _meta_main(BaseFragment):
    __lexicon__ = "com.example.getProfile"
    __fragment__ = "main"
    other2 = _frag_other2
    other = _frag_other


class _frag_main(object, metaclass=_meta_main):
    __qualname__ = "main"
    test: "_frag_other"
    test2: "list | None"


# Public entry point for this lexicon: the "main" fragment.
getProfile = _frag_main
+46
lexicon.json
··· 1 + { 2 + "lexicon": 1, 3 + "id": "com.example.getProfile", 4 + "defs": { 5 + "main": { 6 + "type": "query", 7 + "parameters": { 8 + "type": "params", 9 + "required": [ 10 + "user" 11 + ], 12 + "properties": { 13 + "user": { 14 + "type": "string" 15 + } 16 + } 17 + }, 18 + "output": { 19 + "encoding": "application/json", 20 + "schema": { 21 + "type": "object", 22 + "required": [ 23 + "did", 24 + "name" 25 + ], 26 + "properties": { 27 + "did": { 28 + "type": "string" 29 + }, 30 + "name": { 31 + "type": "string" 32 + }, 33 + "displayName": { 34 + "type": "string", 35 + "maxLength": 64 36 + }, 37 + "description": { 38 + "type": "string", 39 + "maxLength": 256 40 + } 41 + } 42 + } 43 + } 44 + } 45 + } 46 + }
+678
old_lexicon.py
··· 1 + import abc 2 + import base64 3 + from typing import Any 4 + from ..id.cid import CIDLink 5 + from ..id import NSID, TID, DID 6 + 7 + 8 + class _Empty: ... 9 + 10 + 11 + Empty = _Empty() 12 + 13 + type JSONObject = ( 14 + None | int | bool | str | list[JSONObject] | dict[str, JSONObject] | Empty 15 + ) 16 + 17 + 18 + class SchemaObject[T: Any](abc.ABC): 19 + @abc.abstractmethod 20 + def __init__(self, obj: dict[str, JSONObject]): ... 21 + 22 + @abc.abstractmethod 23 + def serialize(self, obj: T) -> JSONObject: ... 24 + 25 + @abc.abstractmethod 26 + def deserialize(self, obj: JSONObject) -> T: ... 27 + 28 + @abc.abstractmethod 29 + def validate_json(self, obj: JSONObject) -> bool: ... 30 + 31 + @abc.abstractmethod 32 + def validate(self, obj: T) -> bool: ... 33 + 34 + 35 + class LNull(SchemaObject[None]): 36 + __lexicon_type__: str = "null" 37 + 38 + def __init__(self, obj: dict[str, JSONObject]): 39 + if not isinstance(obj, dict): 40 + raise TypeError("Lexicon typedef must be object") 41 + assert obj.get("type") == "null" 42 + 43 + def serialize(self, obj: None) -> JSONObject: 44 + return None 45 + 46 + def deserialize(self, obj: JSONObject) -> None: 47 + return None 48 + 49 + def validate_json(self, obj: JSONObject) -> bool: 50 + return obj is None 51 + 52 + def validate(self, obj: None) -> bool: 53 + return obj is None 54 + 55 + 56 + class LBoolean(SchemaObject[bool]): 57 + __lexicon_type__: str = "boolean" 58 + default: bool | None = None 59 + const: bool | None = None 60 + 61 + def __init__(self, obj: dict[str, JSONObject]): 62 + if not isinstance(obj, dict): 63 + raise TypeError("Lexicon typedef must be object") 64 + assert obj.get("type") == "boolean" 65 + # Default is bool or None 66 + default = obj.get("default") 67 + assert isinstance(default, bool) or default is None 68 + self.default = default 69 + # Const is bool or None 70 + const = obj.get("const") 71 + assert isinstance(const, bool) or const is None 72 + self.const = const 73 + 74 + def 
serialize(self, obj: bool | None) -> JSONObject: 75 + # Const -> Empty 76 + if self.const is not None: 77 + assert self.const == obj 78 + return Empty 79 + # None -> Default 80 + if obj is None: 81 + assert self.default is not None 82 + return self.default 83 + assert isinstance(obj, bool) 84 + return obj 85 + 86 + def deserialize(self, obj: JSONObject) -> bool: 87 + # Const -> VALUE 88 + if self.const is not None: 89 + return self.const 90 + # None -> Default 91 + if obj is None: 92 + assert self.default is not None 93 + return self.default 94 + assert isinstance(obj, bool) 95 + return obj 96 + 97 + def validate_json(self, obj: JSONObject) -> bool: 98 + # Const: Must be empty 99 + if self.const is not None: 100 + # TODO: Must obj be Empty or can it opt be eq const? 101 + return obj is Empty 102 + # Empty: Must have default 103 + if obj is Empty: 104 + return self.default is not None 105 + # Must be bool 106 + return isinstance(obj, bool) 107 + 108 + def validate(self, obj: bool | None) -> bool: 109 + # Const: Must be eq to const 110 + if self.const is not None: 111 + return obj == self.const 112 + # Must be bool or opt None if default exists 113 + return isinstance(obj, bool) or (self.default is not None and obj is None) 114 + 115 + 116 + class LInteger(SchemaObject[int]): 117 + __lexicon_type__: str = "integer" 118 + minimum: int | None = None 119 + maximum: int | None = None 120 + enum: list[int] | None = None 121 + default: int | None = None 122 + const: int | None = None 123 + 124 + def __init__(self, obj: dict[str, JSONObject]): 125 + if not isinstance(obj, dict): 126 + raise TypeError("Lexicon typedef must be object") 127 + assert obj.get("type") == "integer" 128 + # Default is int or None 129 + default = obj.get("default") 130 + assert isinstance(default, int) or default is None 131 + self.default = default 132 + # Const is int or None 133 + const = obj.get("const") 134 + assert isinstance(const, int) or const is None 135 + self.const = const 136 + # Enum 
is list[int] or None 137 + enum = obj.get("enum") 138 + assert ( 139 + isinstance(enum, list) and all(isinstance(element, int) for element in enum) 140 + ) or enum is None 141 + # Minimum is int or None 142 + minimum = obj.get("minimum") 143 + assert isinstance(minimum, int) or minimum is None 144 + self.minimum = minimum 145 + # Maximum is int or None 146 + maximum = obj.get("maximum") 147 + assert isinstance(maximum, int) or maximum is None 148 + self.maximum = maximum 149 + self.enum = enum # type: ignore # Linter does not see: all(isinstance(element, int) for element in enum) 150 + 151 + def serialize(self, obj: int | None) -> JSONObject: 152 + # Const -> Empty 153 + if self.const is not None: 154 + assert self.const == obj 155 + return Empty 156 + # None -> Default 157 + if obj is None: 158 + assert self.default is not None 159 + return self.default 160 + assert self.validate(obj) 161 + assert isinstance(obj, int) 162 + return obj 163 + 164 + def deserialize(self, obj: JSONObject) -> int: 165 + # Const -> VALUE 166 + if self.const: 167 + return self.const 168 + # None -> Default 169 + if obj is None: 170 + assert self.default is not None 171 + return self.default 172 + assert self.validate_json(obj) 173 + assert isinstance(obj, int) 174 + return obj 175 + 176 + def validate_json(self, obj: JSONObject) -> bool: 177 + # Const: Must be Empty 178 + if self.const is not None: 179 + return obj is Empty 180 + # Empty: Must have default 181 + if obj is Empty: 182 + return self.default is not None 183 + # If enum, must be included 184 + if self.enum and obj not in self.enum: 185 + return False 186 + # Must be int 187 + if not isinstance(obj, int): 188 + return False 189 + # May not be out of bounds 190 + if self.minimum is not None and obj < self.minimum: 191 + return False 192 + if self.maximum is not None and obj > self.maximum: 193 + return False 194 + return True 195 + 196 + def validate(self, obj: int | None) -> bool: 197 + # Const: Must be eq to const 198 + if 
self.const is not None: 199 + return obj == self.const 200 + # If enum, must be in enum 201 + if self.enum and obj not in self.enum: 202 + return False 203 + # None: Must have default 204 + if obj is None: 205 + return self.default is not None 206 + # Must be int 207 + if not isinstance(obj, int): 208 + return False 209 + # May not be out of bounds 210 + if self.minimum is not None and obj < self.minimum: 211 + return False 212 + if self.maximum is not None and obj > self.maximum: 213 + return False 214 + return True 215 + 216 + 217 + type StringFormatable = TID | NSID 218 + STRING_FORMATABLE = (TID, NSID) 219 + STRING_FORMATS = { 220 + "at-identifier": ..., 221 + "at-uri": ..., 222 + "cid": ..., 223 + "datetime": ..., 224 + "did": DID, 225 + "handle": ..., 226 + "nsid": NSID, 227 + "tid": TID, 228 + "record-key": ..., 229 + "uri": ..., 230 + "language": ..., 231 + } 232 + 233 + 234 + class LString(SchemaObject[str | StringFormatable]): 235 + __lexicon_type__: str = "string" 236 + description: str | None = None 237 + format: str | None = None 238 + max_length: int | None = None 239 + min_length: int | None = None 240 + maxGraphemes: int | None = None 241 + minGraphemes: int | None = None 242 + known_values: list[str] | None = None 243 + enum: list[str] | None = None 244 + default: str | None = None 245 + const: str | None = None 246 + 247 + def __init__(self, obj: dict[str, JSONObject]): 248 + if not isinstance(obj, dict): 249 + raise TypeError("Lexicon typedef must be object") 250 + assert obj.get("type") == "string" 251 + # Default is str or None 252 + default = obj.get("default") 253 + assert isinstance(default, str) or default is None 254 + self.default = default 255 + # Const is str or None 256 + const = obj.get("const") 257 + assert isinstance(const, str) or const is None 258 + self.const = const 259 + # Format is str or None 260 + format_ = obj.get("format") 261 + assert isinstance(format_, str) or format_ is None 262 + self.format = format_ 263 + # Enum is 
list[str] or None 264 + enum = obj.get("enum") 265 + assert ( 266 + isinstance(enum, list) and all(isinstance(element, int) for element in enum) 267 + ) or enum is None 268 + self.enum = enum # type: ignore # Linter does not see: all(isinstance(element, str) for element in enum) 269 + # minLength is int or None 270 + min_length = obj.get("minLength") 271 + assert isinstance(min_length, int) or min_length is None 272 + self.min_length = min_length 273 + # maxLength is int or None 274 + max_length = obj.get("maxLength") 275 + assert isinstance(max_length, int) or max_length is None 276 + self.max_length = max_length 277 + 278 + def serialize(self, obj: StringFormatable | str | None) -> JSONObject: 279 + # Const -> Empty 280 + if self.const is not None: 281 + assert self.const == obj 282 + return Empty 283 + # None -> Default 284 + if obj is None: 285 + assert self.default is not None 286 + return self.default 287 + assert self.validate(obj) 288 + if self.format is not None: 289 + assert isinstance(obj, STRING_FORMATS[self.format]) 290 + obj = str(obj) 291 + assert isinstance(obj, str) 292 + return obj 293 + 294 + def deserialize(self, obj: JSONObject) -> str | StringFormatable: 295 + # Const -> VALUE 296 + if self.const: 297 + return self.const 298 + # None -> Default 299 + if obj is None: 300 + assert self.default is not None 301 + return self.default 302 + assert self.validate_json(obj) 303 + assert isinstance(obj, str) 304 + if self.format is not None: 305 + obj = STRING_FORMATS[self.format](obj) 306 + return obj 307 + 308 + def validate_json(self, obj: JSONObject) -> bool: 309 + # Const: Must be Empty 310 + if self.const is not None: 311 + return obj is Empty 312 + # Empty: Must have default 313 + if obj is Empty: 314 + return self.default is not None 315 + # If enum, must be included 316 + if self.enum and obj not in self.enum: 317 + return False 318 + # Must be str 319 + if not isinstance(obj, str): 320 + return False 321 + # May not be out of bounds 322 + if 
self.min_length is not None and len(obj) < self.min_length: 323 + return False 324 + if self.max_length is not None and len(obj) > self.max_length: 325 + return False 326 + # Can be formatable 327 + if self.format is not None: 328 + # try: 329 + # TODO: Improve this ugly thind 330 + STRING_FORMATS[self.format](obj) 331 + # except Exception: 332 + # return False 333 + return True 334 + 335 + def validate(self, obj: StringFormatable | str | None) -> bool: 336 + # Const: Must be eq to const 337 + if self.const is not None: 338 + return obj == self.const 339 + # If enum, must be in enum 340 + if self.enum and obj not in self.enum: 341 + return False 342 + # None: Must have default 343 + if obj is None: 344 + return self.default is not None 345 + # Can be formatable 346 + if self.format is not None: 347 + if not isinstance(obj, STRING_FORMATS[self.format]): 348 + return False 349 + obj = str(obj) 350 + # Must be str 351 + if not isinstance(obj, str): 352 + return False 353 + # May not be out of bounds 354 + if self.min_length is not None and len(obj) < self.min_length: 355 + return False 356 + if self.max_length is not None and len(obj) > self.max_length: 357 + return False 358 + return True 359 + 360 + 361 + class LBytes(SchemaObject[bytes]): 362 + __lexicon_type__: str = "bytes" 363 + description: str | None = None 364 + format: str | None = None 365 + max_length: int | None = None 366 + min_length: int | None = None 367 + 368 + def __init__(self, obj: dict[str, JSONObject]): 369 + if not isinstance(obj, dict): 370 + raise TypeError("Lexicon typedef must be object") 371 + assert obj.get("type") == "bytes" 372 + # minLength is int or None 373 + min_length = obj.get("minLength") 374 + assert isinstance(min_length, int) or min_length is None 375 + self.min_length = min_length 376 + # maxLength is int or None 377 + max_length = obj.get("maxLength") 378 + assert isinstance(max_length, int) or max_length is None 379 + self.max_length = max_length 380 + 381 + def 
serialize(self, obj: bytes) -> JSONObject: 382 + assert self.min_length is None or len(obj) >= self.min_length 383 + assert self.max_length is None or len(obj) <= self.max_length 384 + return {"$bytes": base64.b64encode(obj).decode("ascii")} 385 + 386 + def deserialize(self, obj: JSONObject) -> bytes: 387 + assert isinstance(obj, dict) 388 + assert len(obj.keys()) == 1 389 + vobj = obj.get("$bytes") 390 + assert isinstance(vobj, str) 391 + bobj = base64.b64decode(vobj) 392 + # May not be out of bounds 393 + assert self.min_length is None or len(bobj) >= self.min_length 394 + assert self.max_length is None or len(bobj) <= self.max_length 395 + return bobj 396 + 397 + def validate_json(self, obj: JSONObject) -> bool: 398 + if not isinstance(obj, dict): 399 + return False 400 + if len(obj.keys()) != 1: 401 + return False 402 + vobj = obj.get("$bytes") 403 + if not isinstance(vobj, str): 404 + return False 405 + bobj = base64.b64decode(vobj) 406 + # May not be out of bounds 407 + if self.min_length is not None and len(bobj) < self.min_length: 408 + return False 409 + if self.max_length is not None and len(bobj) > self.max_length: 410 + return False 411 + return True 412 + 413 + def validate(self, obj: bytes | None) -> bool: 414 + if not isinstance(obj, bytes): 415 + return False 416 + # May not be out of bounds 417 + if self.min_length is not None and len(obj) < self.min_length: 418 + return False 419 + if self.max_length is not None and len(obj) > self.max_length: 420 + return False 421 + return True 422 + 423 + 424 + class LArray[T](SchemaObject[list[SchemaObject[T]]]): 425 + __lexicon_type__: str = "array" 426 + items: SchemaObject[T] 427 + description: str | None = None 428 + min_length: int | None = None 429 + max_length: int | None = None 430 + 431 + def __init__(self, obj: dict[str, JSONObject]): 432 + if not isinstance(obj, dict): 433 + raise TypeError("Lexicon typedef must be object") 434 + assert obj.get("type") == "array" 435 + # minLength is int or None 436 
+ min_length = obj.get("minLength") 437 + assert isinstance(min_length, int) or min_length is None 438 + self.min_length = min_length 439 + # maxLength is int or None 440 + max_length = obj.get("maxLength") 441 + assert isinstance(max_length, int) or max_length is None 442 + self.max_length = max_length 443 + # items_obj is SchemaObject 444 + items = obj.get("items") 445 + assert isinstance(items, dict) 446 + items_obj = LexiconObject(items) 447 + self.items = items_obj # type: ignore # Type Hinter is dumb 448 + 449 + def serialize(self, obj: list | None) -> JSONObject: 450 + assert isinstance(obj, list) 451 + assert self.validate(obj) 452 + return [self.items.serialize(element) for element in obj] 453 + 454 + def deserialize(self, obj: JSONObject) -> list: 455 + assert self.validate_json(obj) 456 + assert isinstance(obj, list) 457 + return [self.items.deserialize(element) for element in obj] 458 + 459 + def validate_json(self, obj: JSONObject) -> bool: 460 + # Must be list 461 + if not isinstance(obj, list): 462 + return False 463 + # validate childs 464 + if not all(self.items.validate_json(element) for element in obj): 465 + return False 466 + # May not be out of bounds 467 + if self.min_length is not None and len(obj) < self.min_length: 468 + return False 469 + if self.max_length is not None and len(obj) > self.max_length: 470 + return False 471 + return True 472 + 473 + def validate(self, obj: list) -> bool: 474 + # Must be str 475 + if not isinstance(obj, list): 476 + return False 477 + # validate childs 478 + if not all(self.items.validate(element) for element in obj): 479 + return False 480 + # May not be out of bounds 481 + if self.min_length is not None and len(obj) < self.min_length: 482 + return False 483 + if self.max_length is not None and len(obj) > self.max_length: 484 + return False 485 + return True 486 + 487 + 488 + class LObject[T](SchemaObject[dict[str, SchemaObject[T]]]): 489 + __lexicon_type__: str = "object" 490 + properties: dict[str, 
"SchemaObject[T]"] 491 + description: str | None = None 492 + required: list[str] | None = None 493 + nullable: list[str] | None = None 494 + 495 + def __init__(self, obj: dict[str, JSONObject]): 496 + if not isinstance(obj, dict): 497 + raise TypeError("Lexicon typedef must be object") 498 + assert obj.get("type") == "object" 499 + # required is list[str] or None 500 + required = obj.get("required") 501 + if required is not None: 502 + assert isinstance(required, list) 503 + assert all(isinstance(key, str) for key in required) 504 + self.required = required # type: ignore # Again 505 + # nullable is list[str] or None 506 + nullable = obj.get("nullable") 507 + if nullable is not None: 508 + assert isinstance(nullable, list) 509 + assert all(isinstance(key, str) for key in nullable) 510 + self.nullable = nullable # type: ignore # And once again 511 + # properties_obj is dict[str, SchemaObject] 512 + properties = obj.get("properties") 513 + assert isinstance(properties, dict) 514 + assert all( 515 + isinstance(key, str) and isinstance(value, dict) 516 + for key, value in properties.items() 517 + ) 518 + properties_obj = { 519 + key: LexiconObject(value) # type: ignore # I hate this 520 + for key, value in properties.items() 521 + } 522 + self.properties = properties_obj 523 + 524 + def serialize(self, obj: dict | None) -> JSONObject: 525 + assert isinstance(obj, dict) 526 + assert self.validate(obj) 527 + return { 528 + key: None 529 + if value is None and self.nullable is not None and key in self.nullable 530 + else self.properties[key].serialize(value) 531 + for key, value in obj.items() 532 + } 533 + 534 + def deserialize(self, obj: JSONObject) -> dict: 535 + assert isinstance(obj, dict) 536 + assert self.validate_json(obj) 537 + return { 538 + key: None 539 + if value is None and self.nullable is not None and key in self.nullable 540 + else self.properties[key].deserialize(value) 541 + for key, value in obj.items() 542 + } 543 + 544 + def validate_json(self, obj: 
JSONObject) -> bool: 545 + # Must be dict 546 + if not isinstance(obj, dict): 547 + return False 548 + if self.required is not None: 549 + for key in self.required: 550 + if obj.get("key", Empty) is Empty: 551 + return False 552 + for key in self.properties: 553 + if obj.get(key, Empty) is Empty: 554 + continue 555 + if self.nullable is not None and key in self.nullable and obj[key] is None: 556 + continue 557 + 558 + if not self.properties[key].validate_json(obj[key]): 559 + return False 560 + return True 561 + 562 + def validate(self, obj: dict) -> bool: 563 + # Must be dict 564 + if not isinstance(obj, dict): 565 + return False 566 + if self.required is not None: 567 + for key in self.required: 568 + if key not in obj: 569 + return False 570 + for key in self.properties: 571 + if key not in obj: 572 + continue 573 + if self.nullable is not None and key in self.nullable and obj[key] is None: 574 + continue 575 + 576 + if not self.properties[key].validate(obj[key]): 577 + print(key, "is invalid") 578 + return False 579 + return True 580 + 581 + 582 + class LParams[T](SchemaObject[dict[str, SchemaObject[T]]]): 583 + __lexicon_type__: str = "params" 584 + properties: dict[str, "SchemaObject[T]"] 585 + description: str | None = None 586 + required: list[str] | None = None 587 + 588 + def __init__(self, obj: dict[str, JSONObject]): 589 + if not isinstance(obj, dict): 590 + raise TypeError("Lexicon typedef must be object") 591 + assert obj.get("type") == "params" 592 + # required is list[str] or None 593 + required = obj.get("required") 594 + if required is not None: 595 + assert isinstance(required, list) 596 + assert all(isinstance(key, str) for key in required) 597 + self.required = required # type: ignore # Again 598 + # properties_obj is dict[str, SchemaObject] 599 + properties = obj.get("properties") 600 + assert isinstance(properties, dict) 601 + assert all( 602 + isinstance(key, str) 603 + and isinstance(value, dict) 604 + and value.get("type") 605 + in 
("string", "integer", "boolean", "unknown") # TODO: array 606 + for key, value in properties.items() 607 + ) 608 + properties_obj = { 609 + key: LexiconObject(value) # type: ignore # I hate this 610 + for key, value in properties.items() 611 + } 612 + self.properties = properties_obj 613 + 614 + def serialize(self, obj: dict | None) -> JSONObject: 615 + assert isinstance(obj, dict) 616 + assert self.validate(obj) 617 + return { 618 + key: self.properties[key].serialize(value) for key, value in obj.items() 619 + } 620 + 621 + def deserialize(self, obj: JSONObject) -> dict: 622 + assert isinstance(obj, dict) 623 + assert self.validate_json(obj) 624 + return { 625 + key: self.properties[key].deserialize(value) for key, value in obj.items() 626 + } 627 + 628 + def validate_json(self, obj: JSONObject) -> bool: 629 + # Must be dict 630 + if not isinstance(obj, dict): 631 + return False 632 + if self.required is not None: 633 + for key in self.required: 634 + if obj.get("key", Empty) is Empty: 635 + return False 636 + for key in self.properties: 637 + if obj.get(key, Empty) is Empty: 638 + continue 639 + 640 + if not self.properties[key].validate_json(obj[key]): 641 + return False 642 + return True 643 + 644 + def validate(self, obj: dict) -> bool: 645 + # Must be dict 646 + if not isinstance(obj, dict): 647 + return False 648 + if self.required is not None: 649 + for key in self.required: 650 + if key not in obj: 651 + return False 652 + for key in self.properties: 653 + if key not in obj: 654 + continue 655 + 656 + if not self.properties[key].validate(obj[key]): 657 + print(key, "is invalid") 658 + return False 659 + return True 660 + 661 + 662 + TYPE_MAP: dict[str, type[SchemaObject]] = { 663 + "null": LNull, 664 + "boolean": LBoolean, 665 + "integer": LInteger, 666 + "string": LString, 667 + "bytes": LBytes, 668 + "array": LArray, 669 + "object": LObject, 670 + "params": LParams, 671 + } 672 + 673 + 674 + def LexiconObject(obj: dict[str, JSONObject]): 675 + type_ = 
obj.get("type") 676 + if type_ not in TYPE_MAP: 677 + raise TypeError(f"Cannot find lexcion type '{type_}'") 678 + return TYPE_MAP[type_](obj)
+94
poetry.lock
··· 1 + # This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. 2 + 3 + [[package]] 4 + name = "dataclasses-json" 5 + version = "0.6.7" 6 + description = "Easily serialize dataclasses to and from JSON." 7 + optional = false 8 + python-versions = "<4.0,>=3.7" 9 + groups = ["main"] 10 + files = [ 11 + {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, 12 + {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, 13 + ] 14 + 15 + [package.dependencies] 16 + marshmallow = ">=3.18.0,<4.0.0" 17 + typing-inspect = ">=0.4.0,<1" 18 + 19 + [[package]] 20 + name = "marshmallow" 21 + version = "3.26.1" 22 + description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 23 + optional = false 24 + python-versions = ">=3.9" 25 + groups = ["main"] 26 + files = [ 27 + {file = "marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c"}, 28 + {file = "marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6"}, 29 + ] 30 + 31 + [package.dependencies] 32 + packaging = ">=17.0" 33 + 34 + [package.extras] 35 + dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] 36 + docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"] 37 + tests = ["pytest", "simplejson"] 38 + 39 + [[package]] 40 + name = "mypy-extensions" 41 + version = "1.1.0" 42 + description = "Type system extensions for programs checked with the mypy type checker." 
43 + optional = false 44 + python-versions = ">=3.8" 45 + groups = ["main"] 46 + files = [ 47 + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, 48 + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, 49 + ] 50 + 51 + [[package]] 52 + name = "packaging" 53 + version = "25.0" 54 + description = "Core utilities for Python packages" 55 + optional = false 56 + python-versions = ">=3.8" 57 + groups = ["main"] 58 + files = [ 59 + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, 60 + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, 61 + ] 62 + 63 + [[package]] 64 + name = "typing-extensions" 65 + version = "4.15.0" 66 + description = "Backported and Experimental Type Hints for Python 3.9+" 67 + optional = false 68 + python-versions = ">=3.9" 69 + groups = ["main"] 70 + files = [ 71 + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, 72 + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, 73 + ] 74 + 75 + [[package]] 76 + name = "typing-inspect" 77 + version = "0.9.0" 78 + description = "Runtime inspection utilities for typing module." 
79 + optional = false 80 + python-versions = "*" 81 + groups = ["main"] 82 + files = [ 83 + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, 84 + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, 85 + ] 86 + 87 + [package.dependencies] 88 + mypy-extensions = ">=0.3.0" 89 + typing-extensions = ">=3.7.4" 90 + 91 + [metadata] 92 + lock-version = "2.1" 93 + python-versions = ">=3.13,<4.0" 94 + content-hash = "1dcfeebefe3b4358a15c0e5838093c68d46706f10bcf224eb9d49d1fa951af60"
+15
pyproject.toml
··· 1 + [project] 2 + name = "atplex" 3 + version = "0.1.0" 4 + description = "" 5 + authors = [ 6 + { name = "JoJoJux", email = "80523427+J0J0HA@users.noreply.github.com" }, 7 + ] 8 + readme = "README.md" 9 + requires-python = ">=3.13,<4.0" 10 + dependencies = ["dataclasses-json (>=0.6.7,<0.7.0)"] 11 + 12 + 13 + [build-system] 14 + requires = ["poetry-core>=2.0.0,<3.0.0"] 15 + build-backend = "poetry.core.masonry.api"
+168
result.py
··· 1 + import abc 2 + from typing import Any, Callable, NoReturn 3 + 4 + # This file is not in any way related to the project, 5 + # I just wanted to built Rust's error handling in python so here it is. 6 + 7 + 8 + class Result[T: Any, E: Exception](abc.ABC): 9 + @abc.abstractmethod 10 + def expect(self, message: str) -> T | NoReturn: ... 11 + 12 + @abc.abstractmethod 13 + def unwrap(self) -> T | NoReturn: ... 14 + 15 + @abc.abstractmethod 16 + def unwrap_or(self, value: T) -> T: ... 17 + 18 + @abc.abstractmethod 19 + def unwrap_or_else(self, func: Callable[..., T]) -> T: ... 20 + 21 + @abc.abstractmethod 22 + def unwrap_unchecked(self) -> T | E: ... 23 + 24 + @abc.abstractmethod 25 + def expect_err(self, message: str) -> E | NoReturn: ... 26 + 27 + @abc.abstractmethod 28 + def unwrap_err(self) -> E | NoReturn: ... 29 + 30 + @abc.abstractmethod 31 + def unwrap_err_unchecked(self) -> E | T: ... 32 + 33 + @abc.abstractmethod 34 + def is_err(self) -> bool: ... 35 + 36 + @abc.abstractmethod 37 + def is_ok(self) -> bool: ... 
38 + 39 + def ok(self) -> T | None: 40 + if self.is_ok(): 41 + return ~self 42 + return None 43 + 44 + def err(self) -> E | None: 45 + if self.is_err(): 46 + return -self 47 + return None 48 + 49 + def transpose(self) -> "Result[T, E] | None": 50 + if self.is_ok() and self.ok() is None: 51 + return None 52 + return self 53 + 54 + def __invert__(self) -> T | NoReturn: 55 + return self.unwrap() 56 + 57 + def __neg__(self) -> E: 58 + return self.unwrap_err() 59 + 60 + def __and__(self, other: "Result") -> "Result[T, E]": 61 + return self if self.is_err() else other 62 + 63 + def __or__(self, other: "Result") -> "Result[T, E]": 64 + return self if self.is_ok() else other 65 + 66 + def __rshift__(self, func: Callable[..., "Result[T, E]"]) -> "Result[T, E]": 67 + if self.is_err(): 68 + return self 69 + return func() 70 + 71 + def __floordiv__(self, func: Callable[..., "Result[T, E]"]) -> "Result[T, E]": 72 + if self.is_ok(): 73 + return self 74 + return func() 75 + 76 + 77 + class Ok[T: Any, E: Exception](Result[T, E]): 78 + __match_args__ = ("value",) 79 + value: Any 80 + 81 + def __init__(self, value: T): 82 + self.value = value 83 + 84 + def expect(self, message: str) -> T: 85 + return self.value 86 + 87 + def unwrap(self) -> T: 88 + return self.value 89 + 90 + def unwrap_or(self, value: T) -> T: 91 + return self.value 92 + 93 + def unwrap_or_else(self, func: Callable[[], T]) -> T: 94 + return self.value 95 + 96 + def unwrap_unchecked(self) -> T: 97 + return self.value 98 + 99 + def expect_err(self, message: str) -> NoReturn: 100 + raise RuntimeError(message) 101 + 102 + def unwrap_err(self) -> NoReturn: 103 + raise RuntimeError(self.value) 104 + 105 + def unwrap_err_unchecked(self) -> T: 106 + return self.value 107 + 108 + def is_err(self) -> bool: 109 + return False 110 + 111 + def is_ok(self) -> bool: 112 + return True 113 + 114 + def __repr__(self) -> str: 115 + return f"Ok({repr(self.value)})" 116 + 117 + 118 + class Err[T: Any, E: Exception](Result[T, E]): 119 + 
__match_args__ = ("message",) 120 + message: E 121 + 122 + def __init__(self, message: E): 123 + self.message = message 124 + 125 + def expect(self, message: str) -> NoReturn: 126 + raise RuntimeError(message) 127 + 128 + def unwrap(self) -> NoReturn: 129 + raise self.message 130 + 131 + def unwrap_or(self, value: T) -> T: 132 + return value 133 + 134 + def unwrap_or_else(self, func: Callable[[], T]) -> T: 135 + return func() 136 + 137 + def unwrap_unchecked(self) -> E: 138 + return self.message 139 + 140 + def expect_err(self, message: str) -> E: 141 + return self.message 142 + 143 + def unwrap_err(self) -> E: 144 + return self.message 145 + 146 + def unwrap_err_unchecked(self) -> E: 147 + return self.message 148 + 149 + def is_ok(self) -> bool: 150 + return False 151 + 152 + def is_err(self) -> bool: 153 + return True 154 + 155 + def __repr__(self) -> str: 156 + return f"Err({repr(self.message)})" 157 + 158 + 159 + result_a = Ok(10) 160 + result_b = Err(Exception("hello")) 161 + 162 + match result_b: 163 + case Ok(result): 164 + print("+", result) 165 + case Err(error): 166 + print("-", error) 167 + 168 + print(result_a & result_b)
src/__init__.py

This is a binary file and will not be displayed.

src/coding/__init__.py

This is a binary file and will not be displayed.

+22
src/coding/base32_sortable.py
# Sortable base32 codec ("base32-sortable") used for TID strings.
# The alphabet is chosen so that lexicographic order of the encoded
# strings equals numeric order of the encoded integers.
ALPHABET = "234567abcdefghijklmnopqrstuvwxyz"
DECODING_MAP = {char: index for index, char in enumerate(ALPHABET)}


def encode_int(data: int, length: int) -> str:
    """Encode the low ``length`` bits of ``data`` as base32-sortable.

    One character is emitted per started 5-bit group, so a 64-bit value
    yields 13 characters. Raises ValueError for negative input (a
    negative number has no finite bit width to encode).
    """
    if data < 0:
        raise ValueError("Cannot encode a negative integer")
    digits: list[str] = []
    # Collect least-significant digit first, then reverse once at the end
    # (avoids quadratic string concatenation).
    for _ in range(0, length, 5):
        digits.append(ALPHABET[data & 0b11111])
        data >>= 5
    return "".join(reversed(digits))


def decode_int(data: str) -> int:
    """Decode a base32-sortable string back into an integer.

    Raises ValueError if any character is outside the alphabet.
    """
    decoded = 0
    for letter in data:
        if letter not in DECODING_MAP:
            raise ValueError("Non-base32-sortable digit found")
        decoded <<= 5
        decoded += DECODING_MAP[letter]
    return decoded
+7
src/id/__init__.py
··· 1 + from .did import DID 2 + from .nsid import NSID, NSIDGlob 3 + from .tid import TID 4 + 5 + type StringableIdentifier = DID | NSID | TID 6 + 7 + __all__ = ["DID", "NSID", "NSIDGlob", "TID", "StringableIdentifier"]
+5
src/id/cid.py
class CIDLink:
    """A link to content addressed by CID (content identifier).

    Placeholder value object: the CID is stored as an opaque string with
    no validation (the CID string format is still open on the roadmap).
    """

    link: str

    def __init__(self, link: str):
        self.link = link

    def __repr__(self) -> str:
        return f"CIDLink({self.link!r})"

    def __eq__(self, other: object) -> bool:
        # Value semantics: two links to the same CID compare equal.
        if not isinstance(other, CIDLink):
            return NotImplemented
        return self.link == other.link

    def __hash__(self) -> int:
        return hash(self.link)
+58
src/id/did.py
··· 1 + import enum 2 + import re 3 + from typing import overload 4 + 5 + 6 + class DIDMethod(enum.StrEnum): 7 + WEB = "web" 8 + PLC = "plc" 9 + 10 + 11 + class DID: 12 + __lexicon_string_format__ = "did" 13 + 14 + PATTERN = re.compile(r"^did:[a-z]+:[a-zA-Z0-9._:%-]*[a-zA-Z0-9._-]$") 15 + METHOD_PATTERN = re.compile(r"^[a-z]+$") 16 + VALUE_PATTERN = re.compile(r"^[a-zA-Z0-9._:%-]*[a-zA-Z0-9._-]$") 17 + 18 + __method: DIDMethod 19 + __value: str 20 + 21 + @overload 22 + def __init__(self, did: str, /): ... 23 + @overload 24 + def __init__(self, method: str | DIDMethod, value: str, /): ... 25 + 26 + def __init__(self, did_or_method: str | DIDMethod, value: str | None = None, /): 27 + if value is None: 28 + if not self.PATTERN.match(did_or_method): 29 + raise ValueError("Invalid DID: does not match pattern") 30 + did_or_method, value = did_or_method[4:].split(":", 1) 31 + else: 32 + if not ( 33 + isinstance(did_or_method, DIDMethod) 34 + or self.METHOD_PATTERN.match(did_or_method) 35 + ): 36 + raise ValueError("Invalid DID: method does not match pattern") 37 + if not self.VALUE_PATTERN.match(value): 38 + raise ValueError("Invalid DID: value does not match pattern") 39 + self.__method = ( 40 + DIDMethod(did_or_method) 41 + if not isinstance(did_or_method, DIDMethod) 42 + else did_or_method 43 + ) 44 + self.__value = value 45 + 46 + @property 47 + def method(self) -> str: 48 + return self.__method 49 + 50 + @property 51 + def value(self) -> str: 52 + return self.__value 53 + 54 + def __str__(self) -> str: 55 + return f"did:{self.__method}:{self.__value}" 56 + 57 + def __repr__(self) -> str: 58 + return str(self)
+46
src/id/handle.py
import re


class Handle:
    """An atproto handle: a DNS domain name, normalized to lower case."""

    __lexicon_string_format__ = "handle"

    PATTERN = re.compile(
        r"^([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$"
    )
    FORBIDDEN_TLDS = [
        ".alt",
        ".arpa",
        ".example",
        ".internal",
        ".local",
        ".localhost",
        ".onion",
    ]

    __handle: str

    @property
    def valid(self) -> bool:
        # Only the sentinel handle "handle.invalid" is flagged invalid.
        return self.__handle != "handle.invalid"

    def __init__(self, handle: str):
        """Normalize and validate the handle; raises ValueError if bad."""
        normalized = handle.lower()
        if self.PATTERN.match(normalized) is None:
            raise ValueError("Invalid Handle: does not match pattern")
        for tld in self.FORBIDDEN_TLDS:
            if normalized.endswith(tld):
                raise ValueError("Invalid Handle: uses forbidden tld")
        if normalized.endswith(".invalid") and normalized != "handle.invalid":
            raise ValueError(
                "Invalid Handle: '.invalid' tld may only be used with 'handle.invalid'"
            )
        self.__handle = normalized

    @property
    def labels(self) -> list[str]:
        """The dot-separated DNS labels of the handle."""
        return self.__handle.split(".")

    def __str__(self) -> str:
        return self.__handle

    def __repr__(self) -> str:
        return f"Handle({self})"
+12
src/id/literal.py
class Literal:
    """A fixed record-key value, exposed to the lexicon machinery as the
    record key type ``literal:<value>``."""

    __literal: str

    def __init__(self, literal: str):
        # Marker consumed by code that inspects record key types.
        self.__record_key_type__ = f"literal:{literal}"
        self.__literal = literal

    def __str__(self) -> str:
        return self.__literal

    def __repr__(self) -> str:
        return f"Literal({self})"
+73
src/id/nsid.py
··· 1 + import re 2 + from typing import Iterable, overload 3 + 4 + 5 + class NSID: 6 + __lexicon_string_format__ = "nsid" 7 + 8 + PATTERN = re.compile( 9 + r"^[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(\.[a-zA-Z]([a-zA-Z0-9]{0,62})?)$" 10 + ) 11 + FRAGMENT_PATTERN = re.compile(r"(\.[a-zA-Z]([a-zA-Z0-9]{0,62})?)") 12 + 13 + __segments: list[str] 14 + __fragment: str | None 15 + 16 + @overload 17 + def __init__(self, nsid: str) -> None: ... 18 + @overload 19 + def __init__(self, nsid: Iterable[str]) -> None: ... 20 + 21 + def __init__(self, nsid: str | Iterable[str]) -> None: 22 + if not isinstance(nsid, str): 23 + nsid = ".".join(nsid) 24 + if "#" in nsid: 25 + nsid, self.__fragment = nsid.rsplit("#", 1) 26 + if not self.FRAGMENT_PATTERN.match(self.__fragment): 27 + raise ValueError("Invalid NSID: fragment does not match pattern") 28 + else: 29 + self.__fragment = None 30 + if not self.PATTERN.match(nsid): 31 + raise ValueError("Invalid NSID: does not match pattern") 32 + self.__segments = nsid.split(".") 33 + 34 + @property 35 + def segments(self) -> list[str]: 36 + return self.__segments 37 + 38 + @property 39 + def fragment(self) -> str | None: 40 + return self.__fragment 41 + 42 + @property 43 + def domain_authority(self) -> list[str]: 44 + return self.__segments[:-1] 45 + 46 + @property 47 + def name(self) -> str: 48 + return self.__segments[-1] 49 + 50 + def __str__(self) -> str: 51 + return ".".join(self.__segments) + ( 52 + "#" + self.__fragment if self.__fragment is not None else "" 53 + ) 54 + 55 + def __repr__(self) -> str: 56 + return f"NSID({str(self)})" 57 + 58 + 59 + class NSIDGlob(NSID): 60 + PATTERN = re.compile( 61 + r"^([a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(\.([a-zA-Z]([a-zA-Z0-9]{0,62})?|\*))|\*)$" 62 + ) 63 + 64 + @classmethod 65 + def any(cls): 66 + return cls("*") 67 + 68 + @classmethod 69 + def sub_of(cls, nsid: NSID): 70 + return cls(str(nsid) + 
".*") 71 + 72 + def matches(self, nsid: NSID): 73 + return str(nsid).startswith(str(self)[:-1])
+110
src/id/tid.py
import base64
import enum
import random
import re
import time
from typing import overload

from ..coding import base32_sortable


# Timestamp (microseconds) of the most recently generated TID; TID.now()
# uses it to guarantee strictly increasing values within this process.
LAST_GENERATED_TID = 0
# Random 10-bit clock identifier, fixed for the lifetime of the process.
CLOCK_IDENTIFIER = random.randint(0, ~-(1 << 10))


class TID:
    """A timestamp identifier: one zero bit, a 53-bit microsecond
    timestamp, and a 10-bit clock identifier, rendered as 13
    base32-sortable characters."""

    __lexicon_string_format__ = "tid"

    PATTERN = re.compile(r"^[234567abcdefghij][234567abcdefghijklmnopqrstuvwxyz]{12}$")

    __value: int

    def __init__(self, tid: bytes | int | str, /):
        """Construct a TID from raw bytes, an integer, or its string form.

        Raises ValueError for malformed input, TypeError for unsupported
        input types.
        """
        if isinstance(tid, bytes):
            tid = int.from_bytes(tid, "big", signed=False)
        if isinstance(tid, str):
            if not self.PATTERN.match(tid):
                raise ValueError("Invalid TID: does not match pattern")
            tid = base32_sortable.decode_int(tid)
        if not isinstance(tid, int):
            raise TypeError(
                f"Invalid TID: cannot construct from '{type(tid).__name__}'"
            )
        if tid >> 63:
            raise ValueError("Invalid TID: first bytes high bit must be 0")
        self.__value = tid

    @property
    def clock_identifier(self) -> int:
        # Low 10 bits.
        return self.__value & ~-(1 << 10)

    @property
    def timestamp(self) -> int:
        # Microseconds since the UNIX epoch (everything above the clock id).
        return self.__value >> 10

    @property
    def value(self) -> int:
        return self.__value

    def as_bytes(self) -> bytes:
        return self.__value.to_bytes(8, "big", signed=False)

    def __str__(self) -> str:
        return f"{base32_sortable.encode_int(self.__value, 64)}"

    def __repr__(self) -> str:
        return f"TID({self.__value})"

    def __lt__(self, other) -> bool:
        if not isinstance(other, TID):
            raise TypeError(
                f"Cannot compare '{type(self).__name__}' to '{type(other).__name__}'"
            )
        return self.__value < other.__value

    def __le__(self, other) -> bool:
        if not isinstance(other, TID):
            raise TypeError(
                f"Cannot compare '{type(self).__name__}' to '{type(other).__name__}'"
            )
        return self.__value <= other.__value

    def __gt__(self, other) -> bool:
        if not isinstance(other, TID):
            raise TypeError(
                f"Cannot compare '{type(self).__name__}' to '{type(other).__name__}'"
            )
        return self.__value > other.__value

    def __ge__(self, other) -> bool:
        if not isinstance(other, TID):
            raise TypeError(
                f"Cannot compare '{type(self).__name__}' to '{type(other).__name__}'"
            )
        return self.__value >= other.__value

    def __eq__(self, other) -> bool:
        # BUG FIX: equality against non-TID values now follows the Python
        # data model (NotImplemented -> False) instead of raising.
        if not isinstance(other, TID):
            return NotImplemented
        return self.__value == other.__value

    def __ne__(self, other) -> bool:
        # BUG FIX: was misspelled '__neq__', which is not a dunder, so
        # '!=' silently fell back to identity comparison.
        if not isinstance(other, TID):
            return NotImplemented
        return self.__value != other.__value

    def __hash__(self) -> int:
        # Required explicitly: defining __eq__ disables the inherited hash.
        return hash(self.__value)

    @classmethod
    def now(cls: "type[TID]") -> "TID":
        """Generate a fresh TID with a strictly increasing timestamp."""
        global LAST_GENERATED_TID

        # BUG FIX: use wall-clock epoch time (time_ns), not monotonic_ns
        # whose origin is arbitrary — TID timestamps are microseconds
        # since the UNIX epoch.
        timestamp = time.time_ns() // 1000
        # BUG FIX: always record the produced timestamp; previously two
        # calls in the same microsecond yielded identical TIDs because
        # LAST_GENERATED_TID was never updated on the fast path.
        if timestamp <= LAST_GENERATED_TID:
            timestamp = LAST_GENERATED_TID + 1
        LAST_GENERATED_TID = timestamp

        value = ((timestamp & ~-(1 << 53)) << 10) + CLOCK_IDENTIFIER
        return cls(value)
+4
src/lexicon/__init__.py
··· 1 + from .lexicon import schema_object, lexicon 2 + 3 + 4 + __all__ = ["schema_object", "lexicon"]
+244
src/lexicon/codegen.py
"""Code generation: turn a parsed lexicon (LLexicon) into a Python module
under LEXICON_STORE, plus package ``__init__`` regeneration."""

import glob
import os
import pathlib
from typing import Literal
import ast
from src.id.nsid import NSID
from src.lexicon.lexicon import (
    LLexicon,
    LObject,
    LParams,
    LQuery,
    SchemaObject,
    lexicon,
)


# Maps lexicon type names to the Python type NAME emitted in generated
# annotations (strings, because they are written into source text).
# Commented-out entries are not implemented yet (see README roadmap).
TYPE_MAP: dict[str, str] = {
    "null": "None",
    "boolean": "bool",
    "integer": "int",
    "string": "str",
    "bytes": "bytes",
    # "cid-link": ...,
    # "blob": ...,
    "array": "list",
    "object": "object",
    # "params": ...,
    "token": "str",
    # "ref": ...,
    # "union": ...,
    # "unknown": ...,
    # "record": ...,
    # "query": ...,
    # "procedure": ...,
    # "subscription": ...,
}

# Root directory that generated lexicon modules are written into.
LEXICON_STORE = pathlib.Path.cwd() / "lex"


def content_for_type(fragment: SchemaObject) -> list[ast.stmt]:
    """Return the class-body statements for one lexicon fragment.

    Objects/params become annotated fields; queries currently emit an
    empty body; any other type gets a bare ``pass``.
    """
    if isinstance(fragment, LObject):
        return [
            ast.AnnAssign(
                target=ast.Name(field),
                annotation=ast.Constant(
                    # NOTE(review): relies on the 'nullables' attribute set
                    # by LObject.initialize (its class-level annotation is
                    # spelled 'nullable') — confirm they stay in sync.
                    TYPE_MAP[schema.__lexicon_type__]
                    + (" | None" if field in (fragment.nullables or []) else "")
                ),
                value=None,
                simple=True,
            )
            for field, schema in fragment.properties.items()
        ] or [ast.Pass()]
    if isinstance(fragment, LParams):
        return [
            ast.AnnAssign(
                target=ast.Name(field),
                annotation=ast.Constant(
                    # Optional params (not in 'required') are typed | None.
                    TYPE_MAP[schema.__lexicon_type__]
                    + (" | None" if field not in (fragment.required or []) else "")
                ),
                value=None,
                simple=True,
            )
            for field, schema in fragment.properties.items()
        ] or [ast.Pass()]
    if isinstance(fragment, LQuery):
        # Queries are not generated yet (roadmap).
        return []
    return [ast.Pass()]


def create_lexicon(lexicon: LLexicon) -> ast.Module:
    """Build the AST of a Python module representing ``lexicon``.

    For every non-main fragment two classes are emitted: a metaclass
    ``_meta_<key>`` (carrying lexicon/fragment identity) and the fragment
    class ``_frag_<key>`` that uses it. The 'main' fragment — when
    present — additionally exposes the other fragments as attributes and
    is re-exported under the lexicon's short name; otherwise a
    ``_export`` container instance is bound to that name.
    """
    lex_nsid = str(lexicon.id).replace("-", "_")

    body: list[ast.stmt] = [
        ast.ImportFrom(
            "src.lexicon.model",
            [ast.alias("BaseFragment"), ast.alias("BaseExport")],
            level=0,
            lineno=0,
        ),
    ]
    names_for_main = []
    # Sort so that 'main' (if any) comes last; it is skipped here and
    # handled below once all other fragment names are known.
    for key, fragment in sorted(lexicon.defs.items(), key=lambda x: x[0] == "main"):
        if key == "main":
            continue
        names_for_main.append(key)
        body.append(
            ast.ClassDef(
                bases=[ast.Name("BaseFragment")],
                name="_meta_" + key,
                body=[
                    ast.Assign(
                        targets=[ast.Name("__lexicon__")],
                        value=ast.Constant(lex_nsid),
                        lineno=0,
                    ),
                    ast.Assign(
                        targets=[ast.Name("__fragment__")],
                        value=ast.Constant(key),
                        lineno=0,
                    ),
                ],
            )
        )
        body.append(
            ast.ClassDef(
                "_frag_" + key,
                bases=[ast.Name(TYPE_MAP[fragment.__lexicon_type__])],
                keywords=[ast.keyword("metaclass", ast.Name("_meta_" + key))],
                body=[
                    ast.Assign(
                        [ast.Name("__qualname__")], ast.Constant(key), lineno=0
                    ),
                    *content_for_type(fragment),
                ],
            ),
        )
    if "main" in lexicon.defs:
        fragment = lexicon.defs["main"]
        body.append(
            ast.ClassDef(
                bases=[ast.Name("BaseFragment")],
                name="_meta_main",
                body=[
                    ast.Assign(
                        targets=[ast.Name("__lexicon__")],
                        value=ast.Constant(lex_nsid),
                        lineno=0,
                    ),
                    ast.Assign(
                        targets=[ast.Name("__fragment__")],
                        value=ast.Constant("main"),
                        lineno=0,
                    ),
                    # Expose every sibling fragment on main's metaclass.
                    *[
                        ast.Assign(
                            targets=[ast.Name(name)],
                            value=ast.Name("_frag_" + name),
                            lineno=0,
                        )
                        for name in names_for_main
                    ],
                ],
            )
        )
        body.append(
            ast.ClassDef(
                "_frag_main",
                bases=[ast.Name(TYPE_MAP[fragment.__lexicon_type__])],
                keywords=[ast.keyword("metaclass", ast.Name("_meta_main"))],
                body=[
                    ast.Assign(
                        [ast.Name("__qualname__")], ast.Constant("main"), lineno=0
                    ),
                    *content_for_type(fragment),
                ],
            ),
        )
        # Re-export main under the lexicon's short name, e.g. `getProfile`.
        body.append(
            ast.Assign(
                targets=[ast.Name(lexicon.id.name)],
                value=ast.Name("_frag_main"),
                lineno=0,
            )
        )
    else:
        # No 'main': bind an instantiated _export container instead.
        body.append(
            ast.ClassDef(
                bases=[ast.Name("BaseExport")],
                name="_export",
                body=[
                    ast.Assign(
                        targets=[ast.Name("__lexicon__")],
                        value=ast.Constant(lex_nsid),
                        lineno=0,
                    ),
                    *[
                        ast.Assign(
                            targets=[ast.Name(name)],
                            value=ast.Name("_frag_" + name),
                            lineno=0,
                        )
                        for name in names_for_main
                    ],
                ],
            )
        )
        body.append(
            ast.Assign(
                targets=[ast.Name(lexicon.id.name)],
                # NOTE(review): ast.Call built without args/keywords —
                # confirm ast.unparse accepts this on the targeted Python.
                value=ast.Call(ast.Name("_export")),
                lineno=0,
            )
        )
    return ast.Module(
        body=body,
    )


def convert_lexicon(lexicon: LLexicon) -> None:
    """Generate ``lexicon`` and write it to its path under LEXICON_STORE.

    The file path mirrors the NSID segments, e.g. com.example.foo ->
    lex/com/example/foo.py. Parent directories are created as needed.
    """
    lexicon_authority_store = pathlib.Path(LEXICON_STORE, *lexicon.id.domain_authority)
    os.makedirs(lexicon_authority_store, exist_ok=True)
    lexicon_store = pathlib.Path(LEXICON_STORE, *lexicon.id.segments).with_suffix(".py")
    with lexicon_store.open("w", encoding="utf-8") as file:
        file.write(ast.unparse(create_lexicon(lexicon)))


def regenerate_dunder_inits(changed: NSID) -> None:
    """Rewrite the ``__init__.py`` of every package on ``changed``'s path.

    Each __init__ re-imports its non-dunder children and declares them in
    ``__all__``. At the deepest level the import targets the generated
    module itself rather than the package.
    """
    segments = ["", *changed.domain_authority]
    lng = len(segments) - 1
    for idx in range(len(segments)):
        segment_path = pathlib.Path(LEXICON_STORE, *segments[: idx + 1])
        files = list(segment_path.glob("*"))
        file_content = ast.unparse(
            ast.Module(
                [
                    *[
                        ast.ImportFrom(
                            # NOTE(review): a literal "." module name with
                            # level=0 unparses as a relative import — works
                            # textually, but is unusual; confirm intended.
                            ("." if idx < lng else "." + file.with_suffix("").name),
                            [ast.alias(file.with_suffix("").name)],
                            level=0,
                            lineno=0,
                        )
                        for file in files
                        if not file.name.startswith("__")
                    ],
                    ast.Assign(
                        [ast.Name("__all__")],
                        ast.List(
                            [
                                ast.Constant(file.with_suffix("").name)
                                for file in files
                                if not file.name.startswith("__")
                            ]
                        ),
                        lineno=0,
                    ),
                ]
            )
        )
        with (segment_path / "__init__.py").open("w", encoding="utf-8") as fw:
            fw.write(file_content)
+645
src/lexicon/lexicon.py
import abc
import re

from ..id.nsid import NSID


# Any JSON-decodable value, as produced by json.loads.
type JSONObject = None | int | bool | str | list[JSONObject] | dict[str, JSONObject]


class SchemaObject(abc.ABC):
    """Base class for all parsed lexicon schema definitions.

    Parsing protocol: ``__init__`` consumes ("pops") the generic keys
    ``type`` and ``description`` from the raw dict, delegates the
    type-specific keys to ``initialize``, and finally asserts the dict is
    empty — so every subclass must pop exactly the keys it understands.
    The input dict is therefore MUTATED during parsing.
    """

    # Lexicon 'type' tag this class parses; names starting with "__" are
    # internal pseudo-types and skip the type-tag check below.
    __lexicon_type__: str
    description: str | None = None

    def __init__(self, obj: dict[str, JSONObject]):
        if not isinstance(obj, dict):
            raise TypeError("Lexicon typedef must be object")
        type_ = obj.pop("type", None)
        if not self.__lexicon_type__.startswith("__"):
            # NOTE(review): message is missing a closing quote after the
            # first placeholder.
            assert (
                type_ == self.__lexicon_type__
            ), f"Cannot interpret obj of type '{type_} as type '{self.__lexicon_type__}'"

        description = obj.pop("description", None)
        assert description is None or isinstance(
            description, str
        ), "'description' must be a string"
        self.description = description

        self.initialize(obj)
        # Anything left over is an unknown key -> reject the definition.
        assert not obj, f"Unused keys in obj of type '{self.__lexicon_type__}': {", ".join(obj.keys())}"

    @abc.abstractmethod
    def initialize(self, obj: dict[str, JSONObject]) -> None: ...
34 + 35 + 36 + class LNull(SchemaObject): 37 + __lexicon_type__: str = "null" 38 + 39 + def initialize(self, obj: dict[str, JSONObject]) -> None: 40 + pass 41 + 42 + 43 + class LBoolean(SchemaObject): 44 + __lexicon_type__: str = "boolean" 45 + 46 + const: bool | None = None 47 + default: bool | None = None 48 + 49 + def initialize(self, obj: dict[str, JSONObject]) -> None: 50 + # Default is bool or None 51 + default = obj.pop("default", None) 52 + assert default is None or isinstance( 53 + default, bool 54 + ), "'default' must be a boolean" 55 + self.default = default 56 + 57 + # Const is bool or None 58 + const = obj.pop("const", None) 59 + assert const is None or isinstance(const, bool), "'const' must be a boolean" 60 + self.const = const 61 + 62 + 63 + class LInteger(SchemaObject): 64 + __lexicon_type__: str = "integer" 65 + 66 + const: int | None = None 67 + default: int | None = None 68 + minimum: int | None = None 69 + maximum: int | None = None 70 + enum: list[int] | None = None 71 + 72 + def initialize(self, obj: dict[str, JSONObject]) -> None: 73 + # Minimum is int or None 74 + minimum = obj.pop("minimum", None) 75 + assert minimum is None or isinstance( 76 + minimum, int 77 + ), "'minimum' must be an integer" 78 + self.minimum = minimum 79 + 80 + # Maximum is int or None 81 + maximum = obj.pop("maximum", None) 82 + assert maximum is None or isinstance( 83 + maximum, int 84 + ), "'maximum' must be an integer" 85 + self.maximum = maximum 86 + 87 + # Enum is list[int] or None 88 + enum = obj.pop("enum", None) 89 + assert enum is None or ( 90 + isinstance(enum, list) and all(isinstance(element, int) for element in enum) 91 + ), "'enum' must be a list of integers" 92 + self.enum = enum # type: ignore 93 + 94 + # Default is int or None 95 + default = obj.pop("minimum", None) 96 + assert default is None or isinstance( 97 + default, int 98 + ), "'default' must be an integer" 99 + self.default = default 100 + 101 + # Const is int or None 102 + const = 
obj.pop("const", None) 103 + assert const is None or isinstance(const, int), "'const' must be an integer" 104 + self.const = const 105 + 106 + 107 + STRING_FORMATS: set[str] = { 108 + "at-identifier", 109 + "at-uri", 110 + "cid", 111 + "datetime", 112 + "did", 113 + "handle", 114 + "nsid", 115 + "tid", 116 + "record-key", 117 + "uri", 118 + "language", 119 + } 120 + 121 + 122 + class LString(SchemaObject): 123 + __lexicon_type__: str = "string" 124 + 125 + const: str | None = None 126 + default: str | None = None 127 + format: str | None = None 128 + min_length: int | None = None 129 + max_length: int | None = None 130 + min_graphemes: int | None = None 131 + max_graphemes: int | None = None 132 + enum: list[str] | None = None 133 + known_values: list[str] | None = None 134 + 135 + def initialize(self, obj: dict[str, JSONObject]) -> None: 136 + # Format is str or None 137 + format_ = obj.pop("format", None) 138 + assert ( 139 + format_ is None or format_ in STRING_FORMATS 140 + ), f"'format' must be one of: {", ".join(STRING_FORMATS)}" 141 + self.format = format_ 142 + 143 + # MinLength is int or None 144 + min_length = obj.pop("minLength", None) 145 + assert min_length is None or isinstance( 146 + min_length, int 147 + ), "'minLength' must be an integer" 148 + self.min_length = min_length 149 + 150 + # MaxLength is int or None 151 + max_length = obj.pop("maxLength", None) 152 + assert max_length is None or isinstance( 153 + max_length, int 154 + ), "'maxLength' must be an integer" 155 + self.max_length = max_length 156 + 157 + # MinGraphemes is int or None 158 + min_graphemes = obj.pop("minGraphemes", None) 159 + assert min_graphemes is None or isinstance( 160 + min_graphemes, int 161 + ), "'minGraphemes' must be an integer" 162 + self.min_graphemes = min_graphemes 163 + 164 + # MaxGraphemes is int or None 165 + max_graphemes = obj.pop("maxGraphemes", None) 166 + assert max_graphemes is None or isinstance( 167 + max_graphemes, int 168 + ), "'maxGraphemes' must be 
an integer" 169 + self.max_graphemes = max_graphemes 170 + 171 + # KnownValues is list[str] or None 172 + known_values = obj.pop("knownValues", None) 173 + assert known_values is None or ( 174 + isinstance(known_values, list) 175 + and all(isinstance(element, str) for element in known_values) 176 + ), "'enum' must be a list of integers" 177 + self.known_values = known_values # type: ignore 178 + 179 + # Enum is list[str] or None 180 + enum = obj.pop("enum", None) 181 + assert enum is None or ( 182 + isinstance(enum, list) and all(isinstance(element, str) for element in enum) 183 + ), "'enum' must be a list of integers" 184 + self.enum = enum # type: ignore 185 + 186 + # Default is str or None 187 + default = obj.pop("minimum", None) 188 + assert default is None or isinstance(default, str), "'default' must be a string" 189 + self.default = default 190 + 191 + # Const is str or None 192 + const = obj.pop("const", None) 193 + assert const is None or isinstance(const, str), "'const' must be a string" 194 + self.const = const 195 + 196 + 197 + class LBytes(SchemaObject): 198 + __lexicon_type__: str = "bytes" 199 + 200 + min_length: int | None = None 201 + max_length: int | None = None 202 + 203 + def initialize(self, obj: dict[str, JSONObject]) -> None: 204 + # MinLength is int or None 205 + min_length = obj.pop("minLength", None) 206 + assert min_length is None or isinstance( 207 + min_length, int 208 + ), "'minLength' must be an integer" 209 + self.min_length = min_length 210 + 211 + # MaxLength is int or None 212 + max_length = obj.pop("maxLength", None) 213 + assert max_length is None or isinstance( 214 + max_length, int 215 + ), "'maxLength' must be an integer" 216 + self.max_length = max_length 217 + 218 + 219 + class LCIDLink(SchemaObject): 220 + __lexicon_type__: str = "cid-link" 221 + 222 + def initialize(self, obj: dict[str, JSONObject]) -> None: ... 
class LArray(SchemaObject):
    """Lexicon ``array`` type: homogeneous items with length bounds."""

    __lexicon_type__: str = "array"

    items: SchemaObject
    min_length: int | None = None
    max_length: int | None = None

    def initialize(self, obj: dict[str, JSONObject]) -> None:
        # MinLength is int or None
        min_length = obj.pop("minLength", None)
        assert min_length is None or isinstance(
            min_length, int
        ), "'minLength' must be an integer"
        self.min_length = min_length

        # MaxLength is int or None
        max_length = obj.pop("maxLength", None)
        assert max_length is None or isinstance(
            max_length, int
        ), "'maxLength' must be an integer"
        self.max_length = max_length

        # Items is SchemaObject (required)
        items = obj.pop("items")
        assert isinstance(items, dict), "'items' must be a valid schema definition"
        self.items = schema_object(items)


class LObject(SchemaObject):
    """Lexicon ``object`` type: named properties, required/nullable sets."""

    __lexicon_type__: str = "object"

    properties: dict[str, "SchemaObject"]
    required: list[str] | None = None
    # BUG FIX: annotation was spelled 'nullable' while initialize (and the
    # code generator) use the attribute 'nullables'.
    nullables: list[str] | None = None

    def initialize(self, obj: dict[str, JSONObject]) -> None:
        # Properties is a record of schema definitions
        properties = obj.pop("properties", {})
        assert isinstance(properties, dict) and all(
            isinstance(key, str) and isinstance(value, dict)
            for key, value in properties.items()
        ), "'properties' must be a record of valid schema definitions"
        self.properties = {
            key: schema_object(value)  # type: ignore
            for key, value in properties.items()
        }

        # Required is list[keyof[properties]]
        required = obj.pop("required", [])
        assert isinstance(required, list) and all(
            key in self.properties for key in required
        ), "'required' must be a list of keys in 'properties'"
        self.required = required  # type: ignore

        # Nullables is list[keyof[properties]]
        nullables = obj.pop("nullables", [])
        assert isinstance(nullables, list) and all(
            key in self.properties for key in nullables
        ), "'nullables' must be a list of keys in 'properties'"
        self.nullables = nullables


class LBlob(SchemaObject):
    """Lexicon ``blob`` type: accepted MIME types and size limit."""

    __lexicon_type__: str = "blob"

    accept: list[str] | None
    max_size: int | None = None

    def initialize(self, obj: dict[str, JSONObject]) -> None:
        # Accept is list[str]
        accept = obj.pop("accept", [])
        assert isinstance(accept, list) and all(
            isinstance(value, str) for value in accept
        ), "'accept' must be a list of strings"
        self.accept = accept  # type: ignore

        # MaxSize is int or None
        # BUG FIX: 'maxSize' was effectively mandatory (default was a
        # list, failing the int assertion) and the value was stored into
        # the unrelated attribute 'nullables'.
        max_size = obj.pop("maxSize", None)
        assert max_size is None or isinstance(
            max_size, int
        ), "'maxSize' must be an integer"
        self.max_size = max_size


class LParams(SchemaObject):
    """Lexicon ``params`` type: query/subscription parameters."""

    __lexicon_type__: str = "params"

    required: list[str] | None = None
    properties: dict[str, "SchemaObject"]

    def initialize(self, obj: dict[str, JSONObject]) -> None:
        # Properties is a record of schema definitions
        properties = obj.pop("properties", {})
        assert isinstance(properties, dict) and all(
            isinstance(key, str) and isinstance(value, dict)
            for key, value in properties.items()
        ), "'properties' must be a record of valid schema definitions"
        self.properties = {
            key: schema_object(value)  # type: ignore
            for key, value in properties.items()
        }

        # Required is list[keyof[properties]]
        required = obj.pop("required", [])
        assert isinstance(required, list) and all(
            key in self.properties for key in required
        ), "'required' must be a list of keys in 'properties'"
        self.required = required  # type: ignore


class LToken(SchemaObject):
    """Lexicon ``token`` type; a named symbol with no payload."""

    __lexicon_type__: str = "token"

    def initialize(self, obj: dict[str, JSONObject]) -> None:
        pass


# An NSID, a #fragment, or both (e.g. "com.example.foo#bar", "#bar").
REF_PATTERN = re.compile(
    r"^([a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(\.[a-zA-Z]([a-zA-Z0-9]{0,62})?))?(#[a-zA-Z]([a-zA-Z0-9]{0,62})?)?$"
)


class LRef(SchemaObject):
    """Lexicon ``ref`` type: a reference to another definition."""

    __lexicon_type__: str = "ref"

    ref: str

    def initialize(self, obj: dict[str, JSONObject]):
        # Ref is an NSID ref
        ref = obj.pop("ref")
        assert (
            isinstance(ref, str) and len(ref) and REF_PATTERN.match(ref)
        ), "'ref' must be a valid nsid ref"
        self.ref = ref


class LUnion(SchemaObject):
    """Lexicon ``union`` type: one of several referenced definitions."""

    __lexicon_type__ = "union"

    refs: list[str]
    closed: bool = False

    def initialize(self, obj: dict[str, JSONObject]):
        # Refs is list of NSID refs
        refs = obj.pop("refs")
        # BUG FIX: message named 'ref' instead of 'refs'.
        assert isinstance(refs, list) and all(
            isinstance(ref, str) and REF_PATTERN.match(ref) for ref in refs
        ), "'refs' must be a list of valid nsid refs"
        self.refs = refs  # type: ignore

        # Closed is bool
        closed = obj.pop("closed", False)
        assert isinstance(closed, bool), "'closed' must be a boolean"
        self.closed = closed


class LUnkown(SchemaObject):
    """Lexicon ``unknown`` type; carries no constraints."""

    # BUG FIX: was "unkown", which never matched the "unknown" type tag
    # checked in SchemaObject.__init__ (TYPE_MAP routes "unknown" here),
    # so parsing any unknown-typed field always failed.
    __lexicon_type__ = "unknown"

    def initialize(self, obj: dict[str, JSONObject]):
        pass


# Record key types per the lexicon spec (besides "literal:<value>").
# BUG FIX: "tid" was missing.
RECORD_KEY_TYPES = {
    "tid",
    "nsid",
    "any",
}


class LRecord(SchemaObject):
    """Lexicon ``record`` primary type: key scheme plus record object."""

    # BUG FIX: was "unkown"; TYPE_MAP routes "record" here, so the type
    # tag check in SchemaObject.__init__ always failed.
    __lexicon_type__ = "record"

    key: str
    record: LObject

    def initialize(self, obj: dict[str, JSONObject]):
        # key is a record key type
        key = obj.pop("key")
        assert (
            isinstance(key, str)
            and (
                key in RECORD_KEY_TYPES or key.startswith("literal:")
            )  # TODO: Improve 'literal' filter
        ), "'key' must be a valid record key type"
        self.key = key

        # Record is LObject
        record = obj.pop("record")
        assert (
            isinstance(record, dict) and record.get("type") == "object"
        ), "'record' must be an object"
        self.record = LObject(record)


class LQueryOutput(SchemaObject):
    """Output (or input, for procedures) body of a query: encoding + schema."""

    __lexicon_type__ = "__query_output"

    encoding: str
    schema: SchemaObject

    def initialize(self, obj: dict[str, JSONObject]):
        # Encoding is str
        encoding = obj.pop("encoding")
        assert isinstance(encoding, str), "'encoding' must be a string"
        self.encoding = encoding

        # Schema is object/ref/union
        schema = obj.pop("schema")
        # BUG FIX: message named 'record' instead of 'schema'.
        assert isinstance(schema, dict) and schema.get("type") in {
            "object",
            "ref",
            "union",
        }, "'schema' must be an object, a ref, or a union of refs"
        self.schema = schema_object(schema)


class LQueryError(SchemaObject):
    """A named error a query/procedure/subscription may return."""

    __lexicon_type__ = "__query_error"

    name: str

    def initialize(self, obj: dict[str, JSONObject]):
        # Name is str
        name = obj.pop("name")
        assert isinstance(name, str), "'name' must be a string"
        self.name = name


class LQuery(SchemaObject):
    """Lexicon ``query`` primary type: parameters, output, errors."""

    __lexicon_type__ = "query"

    parameters: LParams | None = None
    output: LQueryOutput | None = None
    errors: list[LQueryError] | None = None

    def initialize(self, obj: dict[str, JSONObject]):
        # parameters is LParams or None
        parameters = obj.pop("parameters", None)
        if parameters is not None:
            assert (
                isinstance(parameters, dict) and parameters.get("type") == "params"
            ), "'parameters' must be a params object"
            self.parameters = LParams(parameters)
        else:
            self.parameters = None

        # output is LQueryOutput or None
        output = obj.pop("output", None)
        if output is not None:
            assert isinstance(output, dict), "'output' must be an object"
            self.output = LQueryOutput(output)
        else:
            self.output = None

        # errors is list[LQueryError] or None
        errors = obj.pop("errors", None)
        if errors is not None:
            assert isinstance(errors, list) and all(
                isinstance(error, dict) for error in errors
            ), "'errors' must be a list of objects"
            self.errors = [
                LQueryError(error)  # type:ignore
                for error in errors
            ]
        else:
            self.errors = None


class LProcedure(LQuery):
    """Lexicon ``procedure`` primary type: a query plus an input body."""

    __lexicon_type__ = "procedure"

    input: LQueryOutput | None = None

    def initialize(self, obj: dict[str, JSONObject]):
        super().initialize(obj)

        # input is LQueryOutput or None (same shape as an output body)
        input_ = obj.pop("input", None)
        if input_ is not None:
            assert isinstance(input_, dict), "'input' must be an object"
            self.input = LQueryOutput(input_)
        else:
            self.input = None


class LSubscriptionMessage(SchemaObject):
    """Message schema of a subscription; must be a union of refs."""

    __lexicon_type__ = "__subscription_message"

    schema: SchemaObject | None = None

    def initialize(self, obj: dict[str, JSONObject]):
        # Schema is a union
        schema = obj.pop("schema")
        # BUG FIX: message named 'record' instead of 'schema'.
        assert (
            isinstance(schema, dict) and schema.get("type") == "union"
        ), "'schema' must be a union of refs"
        self.schema = schema_object(schema)


class LSubscription(SchemaObject):
    """Lexicon ``subscription`` primary type: parameters, message, errors."""

    __lexicon_type__ = "subscription"

    parameters: LParams | None = None
    message: LSubscriptionMessage | None = None
    errors: list[LQueryError] | None = None

    def initialize(self, obj: dict[str, JSONObject]):
        # parameters is LParams or None
        parameters = obj.pop("parameters", None)
        if parameters is not None:
            assert (
                isinstance(parameters, dict) and parameters.get("type") == "params"
            ), "'parameters' must be a params object"
            self.parameters = LParams(parameters)
        else:
            self.parameters = None

        # message is LSubscriptionMessage or None
        message = obj.pop("message", None)
        if message is not None:
            # BUG FIX: message named 'output' instead of 'message'.
            assert isinstance(message, dict), "'message' must be an object"
            self.message = LSubscriptionMessage(message)
        else:
            self.message = None

        # errors is list[LQueryError] or None
        errors = obj.pop("errors", None)
        if errors is not None:
            assert isinstance(errors, list) and all(
                isinstance(error, dict) for error in errors
            ), "'errors' must be a list of objects"
            self.errors = [
                LQueryError(error)  # type:ignore
                for error in errors
            ]
        else:
            self.errors = None


NSID_PATTERN = re.compile(
    r"^[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(\.[a-zA-Z]([a-zA-Z0-9]{0,62})?)$"
)
NSID_FRAGMENT_PATTERN = re.compile(r"^[a-zA-Z]([a-zA-Z0-9]{0,62})?$")


class LLexicon(SchemaObject):
    """A complete lexicon document: version, id, and its definitions."""

    __lexicon_type__: str = "__lexicon"

    lexicon: int = 1
    id: NSID
    defs: dict[str, SchemaObject]

    def initialize(self, obj: dict[str, JSONObject]) -> None:
        # Lexicon is Literal[1]
        lexicon = obj.pop("lexicon")
        assert lexicon == 1, "'lexicon' must be 1"
        self.lexicon = lexicon  # type: ignore

        # Id is NSID
        id_ = obj.pop("id")
        assert isinstance(id_, str) and NSID_PATTERN.match(
            id_
        ), "'id' must be a valid nsid"
        self.id = NSID(id_)

        # Defs is dict[Fragment, SchemaObject]
        defs = obj.pop("defs")
        assert isinstance(
            defs, dict
        ), "'defs' must be a record of valid schema definitions"
        assert all(
            isinstance(key, str) and NSID_FRAGMENT_PATTERN.match(key)
            for key in defs.keys()
        ), "'defs's keys must be valid nsid fragments"
        assert all(
            isinstance(value, dict) for value in defs.values()
        ), "'defs' must be a record of valid schema definitions"
        # Primary types are only allowed as the 'main' definition.
        assert all(
            value.get("type") not in {"record", "query", "procedure", "subscription"}
            or key == "main"
            for key, value in defs.items()
        ), "primary data types must be in the 'main' fragment"
        self.defs = {
            key: schema_object(value)  # type: ignore
            for key, value in defs.items()
        }


# Dispatch table from lexicon 'type' tags to their parser classes.
TYPE_MAP: dict[str, type[SchemaObject]] = {
    "null": LNull,
    "boolean": LBoolean,
    "integer": LInteger,
    "string": LString,
    "bytes": LBytes,
    "cid-link": LCIDLink,
    "blob": LBlob,
    "array": LArray,
    "object": LObject,
    "params": LParams,
    "token": LToken,
    "ref": LRef,
    "union": LUnion,
    "unknown": LUnkown,
    "record": LRecord,
    "query": LQuery,
    "procedure": LProcedure,
    "subscription": LSubscription,
}


def schema_object(obj: dict[str, JSONObject]) -> SchemaObject:
    """Parse one raw schema dict into its SchemaObject subclass."""
    type_ = obj.get("type")
    if type_ not in TYPE_MAP:
        # BUG FIX: "lexcion" typo in the error message.
        raise TypeError(f"Cannot find lexicon type '{type_}'")
    return TYPE_MAP[type_](obj)


def lexicon(obj: dict[str, JSONObject]) -> LLexicon:
    """Parse a full lexicon document."""
    return LLexicon(obj)
+18
src/lexicon/model.py
class BaseFragment(type):
    """Metaclass for generated fragment classes.

    `repr()` of such a class renders its fully-qualified lexicon reference:
    the bare NSID for the 'main' fragment, NSID#fragment otherwise.
    """

    # Fragment name within the lexicon's defs (e.g. "main", "other").
    __fragment__: str
    # NSID of the owning lexicon (e.g. "com.example.getProfile").
    __lexicon__: str

    def __repr__(self):
        if self.__fragment__ == "main":
            return self.__lexicon__
        return f"{self.__lexicon__}#{self.__fragment__}"


class BaseExport:
    """Base for generated per-lexicon namespace objects that collect a
    lexicon's fragment classes."""

    # NSID of the lexicon whose fragments this object collects.
    __lexicon__: str

    def __repr__(self):
        return f"<fragment collection of lexicon '{self.__lexicon__}'>"
+46
test.py
"""Scratch script: parse a sample com.example.getProfile query lexicon into
its object representation."""

from src.lexicon import lexicon

lex = lexicon(
    {
        "lexicon": 1,
        "id": "com.example.getProfile",
        "defs": {
            "main": {
                "type": "query",
                "parameters": {
                    "type": "params",
                    "required": ["user"],
                    "properties": {"user": {"type": "string"}},
                },
                "output": {
                    "encoding": "application/json",
                    "schema": {
                        "type": "object",
                        "required": ["did", "name"],
                        "properties": {
                            "did": {"type": "string"},
                            "name": {"type": "string"},
                            "displayName": {"type": "string", "maxLength": 64},
                            "description": {"type": "string", "maxLength": 256},
                        },
                    },
                },
            }
        },
    }
)
+29
testt.py
"""Codegen smoke test: generate typed classes for a sample lexicon, then
import and exercise the generated `lex` package."""

from src.id.nsid import NSID
import src.lexicon.codegen
from src.lexicon import lexicon

PROFILE_LEXICON = {
    "lexicon": 1,
    "id": "com.example.getProfile",
    "defs": {
        "main": {
            "type": "object",
            "properties": {
                "test": {"type": "string"},
                "test2": {"type": "array", "items": {"type": "boolean"}},
            },
            "nullables": ["test2"],
        },
        "other": {"type": "object", "properties": {"int": {"type": "integer"}}},
    },
}

src.lexicon.codegen.convert_lexicon(lexicon(PROFILE_LEXICON))
src.lexicon.codegen.regenerate_dunder_inits(NSID("com.example.getProfile"))

# `lex` only exists after the codegen step above, so it is imported late
# on purpose.
import lex  # noqa: E402

print(lex.com.example.getProfile())
print(lex.com.example.getProfile.other)