{"payload":{"feedbackUrl":"https://github.com/orgs/community/discussions/53140","repo":{"id":238583100,"defaultBranch":"main","name":"blue-book","ownerLogin":"lyz-code","currentUserCanPush":false,"isFork":false,"isEmpty":false,"createdAt":"2020-02-06T01:24:08.000Z","ownerAvatar":"https://avatars.githubusercontent.com/u/24810987?v=4","public":true,"private":false,"isOrgOwned":false},"refInfo":{"name":"","listCacheKey":"v0:1651261222.6026912","currentOid":""},"activityList":{"items":[{"before":"a505cebefa4ad2aa25679d1b34b6b6719ebe4036","after":"17ffbe60714d7087fac67c2f3bcd43f052d91b7a","ref":"refs/heads/gh-pages","pushedAt":"2024-08-20T06:47:09.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"deploy: 9062e23c3cc5f0f8299123624be6e1eb7b924117","shortMessageHtmlLink":"deploy: 9062e23"}},{"before":"9062e23c3cc5f0f8299123624be6e1eb7b924117","after":"9ed96cf306ec619ce9a7ecb9730f88c7bb84078f","ref":"refs/heads/main","pushedAt":"2024-08-20T06:46:46.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"github-actions[bot]","name":null,"path":"/apps/github-actions","primaryAvatarUrl":"https://avatars.githubusercontent.com/in/15368?s=80&v=4"},"commit":{"message":"chore: update dependency, publish newsletters and add the not by ai badge","shortMessageHtmlLink":"chore: update dependency, publish newsletters and add the not by ai b…"}},{"before":"55ce654ad24d264695c0b49c9a5168d32e4598c9","after":"a505cebefa4ad2aa25679d1b34b6b6719ebe4036","ref":"refs/heads/gh-pages","pushedAt":"2024-08-19T06:47:23.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"deploy: ca536febb28b5a0c4b10a38cef3db1833b4dcd3d","shortMessageHtmlLink":"deploy: ca536fe"}},{"before":"ca536febb28b5a0c4b10a38cef3db1833b4dcd3d","after":"9062e23c3cc5f0f8299123624be6e1eb7b924117","ref":"refs/heads/main","pushedAt":"2024-08-19T06:46:59.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"github-actions[bot]","name":null,"path":"/apps/github-actions","primaryAvatarUrl":"https://avatars.githubusercontent.com/in/15368?s=80&v=4"},"commit":{"message":"chore: update dependency, publish newsletters and add the not by ai badge","shortMessageHtmlLink":"chore: update dependency, publish newsletters and add the not by ai b…"}},{"before":"a40964b1294d36de4a50c366d136cb156ff84e36","after":"55ce654ad24d264695c0b49c9a5168d32e4598c9","ref":"refs/heads/gh-pages","pushedAt":"2024-08-18T06:45:10.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"deploy: ca536febb28b5a0c4b10a38cef3db1833b4dcd3d","shortMessageHtmlLink":"deploy: ca536fe"}},{"before":"79b5576deba69a2c8ce8a4454998d0a200fab507","after":"a40964b1294d36de4a50c366d136cb156ff84e36","ref":"refs/heads/gh-pages","pushedAt":"2024-08-17T06:44:06.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"deploy: ca536febb28b5a0c4b10a38cef3db1833b4dcd3d","shortMessageHtmlLink":"deploy: 
ca536fe"}},{"before":"01f8f88132eb77759b28b1059779b109f0eebf8b","after":"79b5576deba69a2c8ce8a4454998d0a200fab507","ref":"refs/heads/gh-pages","pushedAt":"2024-08-16T06:45:29.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"deploy: ca536febb28b5a0c4b10a38cef3db1833b4dcd3d","shortMessageHtmlLink":"deploy: ca536fe"}},{"before":"077c2d4fa62a4f5df8c2dc5b78bd95375afa7270","after":"01f8f88132eb77759b28b1059779b109f0eebf8b","ref":"refs/heads/gh-pages","pushedAt":"2024-08-15T06:46:54.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"deploy: d10ff6ab817e44ac038bfd65557a4211c7375604","shortMessageHtmlLink":"deploy: d10ff6a"}},{"before":"d10ff6ab817e44ac038bfd65557a4211c7375604","after":"ca536febb28b5a0c4b10a38cef3db1833b4dcd3d","ref":"refs/heads/main","pushedAt":"2024-08-15T06:46:35.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"github-actions[bot]","name":null,"path":"/apps/github-actions","primaryAvatarUrl":"https://avatars.githubusercontent.com/in/15368?s=80&v=4"},"commit":{"message":"chore: update dependency, publish newsletters and add the not by ai badge","shortMessageHtmlLink":"chore: update dependency, publish newsletters and add the not by ai b…"}},{"before":"8f48f34b562d883954481c1101e6a6d5ca26130a","after":"077c2d4fa62a4f5df8c2dc5b78bd95375afa7270","ref":"refs/heads/gh-pages","pushedAt":"2024-08-14T11:30:59.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"deploy: d10ff6ab817e44ac038bfd65557a4211c7375604","shortMessageHtmlLink":"deploy: d10ff6a"}},{"before":"1f09af8b3bd6611b56263aad6865559353fa9f56","after":"d10ff6ab817e44ac038bfd65557a4211c7375604","ref":"refs/heads/main","pushedAt":"2024-08-14T11:16:44.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"feat(anki#Install the official sync server): Install the official sync server\n**Using docker-compose**\n\nOn the server that holds Anki:\n\n* Create the data directories:\n ```bash\n mkdir -p /data/apps/anki/data\n chown -R 1000:1000 /data/apps/anki/data\n ```\n* Copy the `docker/docker-compose.yaml` to `/data/apps/anki`.\n\n ```yaml\n ---\n version: \"3\"\n\n services:\n anki:\n image: jeankhawand/anki-sync-server:24.04.1\n container_name: anki\n restart: always\n volumes:\n - data:/home/anki/.syncserver\n networks:\n - nginx\n env_file:\n - .env\n\n networks:\n nginx:\n external:\n name: nginx\n\n volumes:\n data:\n driver: local\n driver_opts:\n type: none\n o: bind\n device: /data/apps/anki/data\n ```\n* Add your `.env` file with your credentials\n ```\n SYNC_USER1=user:password\n ```\n* Clone the repository at `/data/apps/anki/src`\n* Copy the `service/anki.service` into `/etc/systemd/system/`\n ```ini\n [Unit]\n Description=anki\n Requires=docker.service\n After=docker.service\n\n [Service]\n Restart=always\n User=root\n Group=docker\n WorkingDirectory=/data/apps/anki\n TimeoutStartSec=100\n RestartSec=2s\n ExecStart=/usr/local/bin/docker-compose -f docker-compose.yaml up\n ExecStop=/usr/local/bin/docker-compose -f 
**[Using docker](https://github.com/ankitects/anki/tree/main/docs/syncserver)**

You can use the [official image](https://hub.docker.com/r/jeankhawand/anki-sync-server). It doesn't support the `latest` tag, and opening an issue to request it is a bit cumbersome.

*Building the image*

Use the output of `anki --version` to deduce the `ANKI_VERSION`:

```bash
git clone https://github.com/ankitects/anki
cd anki/docs/syncserver/
docker build --no-cache --build-arg ANKI_VERSION=<version> -t anki-sync-server .
```

Go make some tea, it takes a while to build the image.

Once the build is done, you can run the image with the following command:

```bash
docker run -d -e "SYNC_USER1=admin:admin" -p 8080:8080 --name anki-sync-server anki-sync-server
```

However, if you want to have multiple users, you have to use the following approach:

```bash
docker run -d -e "SYNC_USER1=test:test" -e "SYNC_USER2=test2:test2" -p 8080:8080 --name anki-sync-server anki-sync-server
```

Moreover, you can pass the additional env vars mentioned [here](https://docs.ankiweb.net/sync-server.html).

The server needs to store a copy of your collection and media in a directory. By default it is `~/.syncserver`; you can change this by defining a `SYNC_BASE` environment variable. This must not be the same location as your normal Anki data folder, as the server and client must store separate copies.

The server listens on an unencrypted HTTP connection, so it's not a good idea to expose it directly to the internet. You'll want to either restrict usage to your local network, or place some form of encryption in front of the server, such as a VPN or an HTTPS reverse proxy.

You can define `SYNC_HOST` and `SYNC_PORT` to change the host and port that the server binds to.

**[Using pip](https://docs.ankiweb.net/sync-server.html#with-pip)**

To avoid downloading desktop Anki's GUI dependencies, you can run a standalone Anki sync server using a Python package downloaded from PyPI instead. Make sure you have Python 3.9+ installed.

```bash
python3 -m venv ~/syncserver
~/syncserver/bin/pip install anki
SYNC_USER1=user:pass ~/syncserver/bin/python -m anki.syncserver
```

feat(anki#Monitor the server): Monitor the server

In theory the docker image has a [HEALTHCHECK](https://github.com/ankitects/anki/blob/main/docs/syncserver/Dockerfile#L30), but it's a lie: `/health` returns a 404 and the container is marked as unhealthy. The [official docker](https://hub.docker.com/r/jeankhawand/anki-sync-server/tags) doesn't support it yet, so we'd have to wait. I thought of opening an issue, but you need to register on their forum.

They don't expose an API, and I haven't found any endpoint to monitor it with the blackbox exporter, so I'll only monitor the logs.
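If a plain liveness check were enough, the blackbox exporter's standard `tcp_connect` module could at least verify that the sync port accepts connections, even without a working HTTP endpoint (this is the stock upstream module, not an Anki-specific check):

```yaml
# blackbox exporter module configuration (standard upstream example)
modules:
  tcp_connect:
    prober: tcp
    timeout: 5s
```

A probe against the container's port 8080 (or your reverse proxy's 443) only proves the port is open, not that syncing actually works.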
feat(anki#Running Anki in headless mode): Running Anki in headless mode

If you want to interact with anki directly without opening the GUI application, [you're out of luck](https://github.com/FooSoft/anki-connect/issues/411). You could try to [interact with the database directly](https://eshapard.github.io/anki/open-the-anki-database-from-python.html), but that's prone to errors if you use more than one client.

fix(authentik#Hide an application from a user): Hide an application from a user

When nothing is bound, everyone has access. You can use this to grant access to one or multiple users/groups, or dynamically give access using policies.

By default, all users can access applications when no policies are bound.

When multiple policies/groups/users are attached, you can configure the Policy engine mode to either:

- Require users to pass all bindings/be members of all groups (ALL), or
- Require users to pass either binding/be a member of either group (ANY)

feat(pytest): How not to run tests with two markers

If you wish to run neither `slow` nor `secondary` tests you can use the flag `-m "not slow and not secondary"`.
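For reference, a minimal sketch of how those markers end up on tests (hypothetical test names; registering the markers in `pytest.ini` avoids warnings):

```python
import pytest

@pytest.mark.slow
def test_full_sync():
    ...

@pytest.mark.secondary
def test_optional_report():
    ...

def test_fast_path():
    # Only this test runs under: pytest -m "not slow and not secondary"
    ...
```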
feat(ruamel_yaml#Load from string): Load from string

```python
from ruyaml import YAML

data = YAML().load('a: 1')
```
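For the reverse operation, `YAML().dump` writes to a stream, so dumping back to a string goes through `StringIO` (a sketch; same API as ruamel.yaml, from which ruyaml is forked):

```python
from io import StringIO

from ruyaml import YAML

yaml = YAML()
data = yaml.load('a: 1')
data['b'] = 2

stream = StringIO()
yaml.dump(data, stream)
print(stream.getvalue())  # -> "a: 1\nb: 2\n"
```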
fix(efs): Don't trust EFS AWS Backup recovery point size

You can't trust the size you see in the AWS Backup console for the last snapshot: it's the size at the moment the backup job created the recovery point, that is, the size of the incremental data. Unfortunately there is no way for customers to check the real size of the oldest recovery point via the AWS Backup web console.

feat(efs): Monitor the cost of the AWS Backup

[Source](https://aws.amazon.com/blogs/storage/cost-allocation-and-tracking-for-aws-centralized-backups/)

fix(forgejo): Codeberg doesn't support forgejo actions so far

The forgejo instance at [codeberg](https://codeberg.org) looks to be production ready. They [have CI actions](https://docs.codeberg.org/ci/actions/) (although you need to host the runners yourself or use Woodpecker CI :/) and [static pages](#host-static-pages-site). It looks like [they won't be moving to forgejo actions soon](https://codeberg.org/Codeberg-CI/feedback/issues/192); if you wish to follow this topic subscribe to [this thread](https://codeberg.org/Codeberg/Community/issues/843). If you want to activate the Woodpecker actions on your repo you need to [file an issue here](https://codeberg.org/Codeberg-e.V./requests), and you can file feedback requests [here](https://codeberg.org/Codeberg-CI/feedback/issues).

feat(knowledge_management#How to read): How to read

When reading for purposes other than entertainment, we typically aim to achieve one of two outcomes: acquiring information or deepening our understanding. Reading for entertainment is straightforward and requires no further explanation.

**Reading for Information vs. Understanding**

- **Reading for Information**: This involves consuming content that is easy to digest, such as news articles or casual reading materials. It increases our knowledge but doesn't necessarily enhance our understanding of a topic.

- **Reading for Understanding**: This type of reading involves engaging with works by authors who possess greater expertise on a subject. Such reading challenges our current knowledge and can either refine our understanding or reveal its limitations.

**The Four Levels of Reading**

Reading can be broken down into four cumulative levels, each building upon the previous one:

1. **Elementary Reading**

   This is the basic reading skill we learn in elementary school.

2. **Inspectional Reading**

   This level involves quickly getting the gist of a text in a limited amount of time. The goal is to decide whether to read the text in its entirety and to understand its general structure and key points.

   - **Systematic Skimming**: Skim the introduction, table of contents, index, and key chapters to identify the main ideas. Jump in and out, reading a paragraph here and there.
   - **Superficial Reading**: Read through the book quickly without pausing to deeply analyze or reflect. The focus is on covering most of the content to get a broad overview, so expect not to understand all the nuances.

3. **Analytical Reading**

   This is a thorough and detailed form of reading aimed at deep understanding. It requires active engagement with the text, including marking important passages, asking questions, and making notes in the margins.

   - **Deep Analysis**: Engage with the text by:
     - Underlining major points
     - Using vertical lines in the margin for longer passages
     - Marking important statements with stars or asterisks
     - Numbering sequences of points in an argument
     - Cross-referencing related ideas with page numbers
     - Circling keywords or phrases
     - Writing questions and answers in the margins
   - **[Incremental reading](https://en.wikipedia.org/wiki/Incremental_reading)**: a software-assisted method for learning and retaining information from reading, which involves the creation of flashcards out of electronic articles. "Incremental reading" means "reading in portions". Instead of a linear reading of one text at a time, the method works by keeping a large list of electronic articles or books (often dozens or hundreds) and reading parts of several articles in each session. The user prioritizes articles in the reading list. During reading, key points of articles are broken up into flashcards, which are then learned and reviewed over an extended period with the help of a spaced repetition algorithm.

4. **Syntopical Reading**

   The most advanced level, syntopical reading, involves reading multiple books on the same subject and synthesizing the information to construct a new understanding. This comparative approach requires placing various texts in conversation with each other, allowing the reader to develop insights that may not be explicitly stated in any single source.

feat(knowledge_management#How to Do Analytical Reading): How to Do Analytical Reading

I organize my learning topics in a `learn.org` document structured as follows:

```org
* DOING Learn to apply joyful militancy concepts.
  - [ ] [[*Analytical read the Joyful militancy book]]
* TODO Improve efficiency
  - [ ] [[*Analytical read Four Thousand Weeks by Oliver Burkman]]

* Learn backlog

** Activism
   ...
** Psychology
   ...
** ...
```

The main headings represent the topics I am actively learning, selected during my [roadmap adjustment](roadmap_adjustment.md). These topics typically require analytical reading of specific texts. The approach I take may vary depending on the source material:

- [e-books](#analytical-reading-on-e-books)
- [books](#analytical-reading-on-books)
- [web browsing](#analytical-reading-while-web-browsing)

**Analytical Reading of E-books**

I use Kobo to read and underline technical and political books. Compared to physical books, e-books offer several advantages:

**Pros:**

- Easy export of highlights
- Convenient for incremental reading
- Quick access to different topics

**Cons:**

- Difficult to take notes in the margins due to Kobo's slow and cumbersome note-making interface

The process goes as follows:

1. **Import the e-book**: I use [`ebops load file.epub`](https://codeberg.org/lyz/ebops) to load the e-book onto the e-reader, and create a TODO element in my `books.org` document with the table of contents so that it's easy to add my ideas using Orgzly.
2. **Read linearly**: I read the text in order. Skimming is challenging, especially with unfamiliar topics.
3. **Underline freely**: I highlight extensively without concern for over-marking.
4. **Export highlights**: I use [`ebops export-highlights`](https://codeberg.org/lyz/ebops) to automatically extract the highlights into a `learn.org` document. The highlights are organised in nested TODO elements for each chapter.
5. **Process key sections**: Select the sections that are more interesting to process. For each of them:
   - Copy and paste the underlined content into the relevant section of my notes, disregarding formatting.
   - Review the mobile notes document for any additional thoughts.
   - Add personal reflections on the topic.
   - Use AI to polish the text.
   - Ask AI to generate [Anki cards](anki.md) based on the content.

feat(mochi): Introduce Mochi

Mochi is a modern Anki with a lot of cool features, but it's closed source.

References:

- [Homepage](https://mochi.cards/)
- [FAQ](https://mochi.cards/docs/faq/)
- [Blog](https://mochi.cards/blog/)
- [API](https://mochi.cards/docs/api/)

fix(org_rw#Load an orgmode file): Load an orgmode file

```python
from pathlib import Path

from org_rw import loads

doc = loads(Path('your_file.org').read_text())
```

feat(org_rw#Write to an orgmode file): Write to an orgmode file

```python
from pathlib import Path

from org_rw import dumps

Path('your_file.org').write_text(dumps(doc))
```
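Putting both together, a load-edit-dump round trip might look like this (a sketch that only assumes the `loads`/`dumps` functions shown above):

```python
from pathlib import Path

from org_rw import dumps, loads

path = Path('your_file.org')
doc = loads(path.read_text())

# ... mutate doc here with the org_rw API ...

path.write_text(dumps(doc))
```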
feat(sqlite3#database or disk is full): Fix database or disk is full

The error message **"database or disk is full"** in SQLite typically indicates that there is insufficient storage space available for the database to operate properly. This can happen for several reasons, including:

1. **Disk Space is Full**: The most common reason is that the disk where the SQLite database is stored has run out of space.

2. **SQLite Database Size Limitations**: SQLite databases have size limitations depending on the file system or the SQLite version. For example, the maximum size of an SQLite database is 140 terabytes. If you are approaching this limit, you might encounter this error.

3. **Quota Limits**: If the database is stored on a network drive or within a user directory, there might be storage quotas imposed by the system administrator.

4. **Temporary Directory Space**: SQLite uses temporary files during operations. If the directory where these files are stored is full, it can trigger this error.

5. **Corrupted Database File**: In some cases, a corrupted database file can lead to this error.

**Troubleshooting Steps**

1. **Check Disk Space**:
   - Ensure that there is enough free disk space on the drive where the SQLite database is stored. You can use `df -h`.

2. **Check Database Size**:
   - Confirm that the database size is within the acceptable limits for your system.
   - You can check the size of the database file directly using file properties, or from SQLite itself (see the sketch after this list).

3. **Check Quota Limits**:
   - Verify that no storage quota is being exceeded if the database is on a network drive or within a managed user directory.

4. **Free Up Space in the Temporary Directory**:
   - Clear up space in the temporary directory used by SQLite (`/tmp` on Unix-like systems).

5. **Vacuum the Database**:
   - If the database has grown large due to deletions and other operations, you can try running `VACUUM` on the database to reclaim unused space:

     ```python
     import sqlite3

     conn = sqlite3.connect('your_database.db')
     conn.execute('VACUUM')
     conn.close()
     ```

   - This might help reduce the size of the database file.

6. **Backup and Restore**:
   - If the database might be corrupted, you could try creating a backup and then restoring it.

7. **Check for Corruption**:
   - Use the `PRAGMA integrity_check;` command to check for any database corruption:

     ```python
     import sqlite3

     conn = sqlite3.connect('your_database.db')
     result = conn.execute('PRAGMA integrity_check').fetchall()
     conn.close()

     if result[0][0] == 'ok':
         print("Database is healthy.")
     else:
         print("Database corruption detected.")
     ```

8. **Check File System Limits**:
   - **File Descriptor Limits**: On some systems, there might be a limit on the number of open files (file descriptors) that a process can have. You can check and increase this limit if needed. On Linux, you can check the limit using:

     ```sh
     ulimit -n
     ```

     To increase it temporarily, use:

     ```sh
     ulimit -n 4096  # Example to increase to 4096
     ```

9. **SQLite Journal Mode**:
   - SQLite uses a journal file during transactions. The default journal mode (`DELETE`) can be space-intensive in some situations.
   - You can try switching to `WAL` (Write-Ahead Logging) mode, which can be more efficient with space:

     ```python
     import sqlite3

     conn = sqlite3.connect('your_database.db')
     conn.execute('PRAGMA journal_mode=WAL;')
     conn.close()
     ```

   - This might help alleviate issues related to temporary file space.

10. **Check for File Permissions**:
    - Ensure that the directory where the database file resides has the correct permissions and that the user running the SQLite process has write access.

11. **Check SQLite Version**:
    - Ensure that you are using a relatively recent version of SQLite. Some bugs in older versions might cause issues that have been resolved in later releases.

12. **Database Locking Issues**:
    - Sometimes, if a process is holding a lock on the database for an extended period, it could cause issues. Make sure no other process is holding onto the database.

13. **Try Rebuilding the Database**:
    - If none of the above works, consider creating a fresh database and migrating the data:

      ```python
      import sqlite3

      old_conn = sqlite3.connect('your_database.db')
      new_conn = sqlite3.connect('new_database.db')

      with new_conn:
          old_conn.backup(new_conn)

      old_conn.close()
      new_conn.close()
      ```

    - This will create a new database file and might resolve any hidden issues with the current file.
14. **SQLite Memory Limitations**:
    - SQLite has a memory limit that could be reached when processing large or complex queries. You can try increasing the cache size:

      ```python
      import sqlite3

      conn = sqlite3.connect('your_database.db')
      conn.execute('PRAGMA cache_size = 10000;')  # Increase the cache size
      conn.close()
      ```

    - You could also adjust the page size or other memory-related parameters:

      ```python
      import sqlite3

      conn = sqlite3.connect('your_database.db')
      conn.execute('PRAGMA page_size = 4096;')  # Historically defaulted to 1024; modern SQLite defaults to 4096
      conn.execute('PRAGMA temp_store = MEMORY;')  # Store temporary tables in memory
      conn.close()
      ```
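As referenced in step 2, one way to check the database size from SQLite itself is to multiply the standard `page_count` and `page_size` PRAGMAs:

```python
import sqlite3

conn = sqlite3.connect('your_database.db')
page_count = conn.execute('PRAGMA page_count').fetchone()[0]
page_size = conn.execute('PRAGMA page_size').fetchone()[0]
conn.close()

print(f"Database size: {page_count * page_size / 1024**2:.1f} MiB")
```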
5ce66df"}},{"before":"6f65de1890ff33559adacfd02404dab3f77ef013","after":"c9e1ed04ef01f58718eae1b6b08fad543ffcf898","ref":"refs/heads/gh-pages","pushedAt":"2024-08-09T06:46:52.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"deploy: 5ce66dfda76fa5b45b38685021b46be18996e1d8","shortMessageHtmlLink":"deploy: 5ce66df"}},{"before":"7cc2f32373aae4c1a3f463736ae2fd5c3b3f3700","after":"6f65de1890ff33559adacfd02404dab3f77ef013","ref":"refs/heads/gh-pages","pushedAt":"2024-08-08T06:46:58.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"deploy: 273502ba8ba40dd8541c43fbd5ec26e6f222b5d9","shortMessageHtmlLink":"deploy: 273502b"}},{"before":"273502ba8ba40dd8541c43fbd5ec26e6f222b5d9","after":"5ce66dfda76fa5b45b38685021b46be18996e1d8","ref":"refs/heads/main","pushedAt":"2024-08-08T06:46:33.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"github-actions[bot]","name":null,"path":"/apps/github-actions","primaryAvatarUrl":"https://avatars.githubusercontent.com/in/15368?s=80&v=4"},"commit":{"message":"chore: update dependency, publish newsletters and add the not by ai badge","shortMessageHtmlLink":"chore: update dependency, publish newsletters and add the not by ai b…"}},{"before":"779fd2dbfa72a237b757cd315f43cf91b56d922e","after":"7cc2f32373aae4c1a3f463736ae2fd5c3b3f3700","ref":"refs/heads/gh-pages","pushedAt":"2024-08-07T10:37:11.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"deploy: 273502ba8ba40dd8541c43fbd5ec26e6f222b5d9","shortMessageHtmlLink":"deploy: 273502b"}},{"before":"736a40629e3271d492ada61c63125858b24450b8","after":"273502ba8ba40dd8541c43fbd5ec26e6f222b5d9","ref":"refs/heads/main","pushedAt":"2024-08-07T10:23:05.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lyz-code","name":"Lyz","path":"/lyz-code","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/24810987?s=80&v=4"},"commit":{"message":"feat(ai_coding): add new prompts for developers\n\n```yaml\n - trigger: :polish\n form: |\n Polish the next code\n [[code]]\n with the next conditions:\n - Use type hints on all functions and methods\n - Add or update the docstring using google style on all functions and methods\n form_fields:\n code:\n multiline: true\n - trigger: :commit\n form: |\n Act as an expert developer. Create a message commit with the next conditions:\n - follow semantic versioning\n - create a semantic version comment per change\n - include all comments in a raw code block so that it's easy to copy\n\n for the following diff\n [[text]]\n form_fields:\n text:\n multiline: true\n```\n\nfeat(aleph#API Usage): API Usage\n\nThe Aleph web interface is powered by a Flask HTTP API. Aleph supports an extensive API for searching documents and entities. It can also be used to retrieve raw metadata, source documents and other useful details. Aleph's API tries to follow a pragmatic approach based on the following principles:\n\n- All API calls are prefixed with an API version; this version is /api/2/.\n- Responses and requests are both encoded as JSON. 
**[Authentication and authorization](https://redocly.github.io/redoc/?url=https://aleph.occrp.org/api/openapi.json#section/Authentication-and-Authorization)**

By default, any Aleph search will return only public documents in responses to API requests.

If you want to access documents which are not marked public, you will need to sign into the tool. This can be done through the use of an API key. The API key for any account can be found by clicking on the "Profile" menu item in the navigation menu.

The API key must be sent on all queries using the `Authorization` HTTP header:

```
Authorization: ApiKey 363af1e2b03b41c6b3adc604956e2f66
```

Alternatively, the API key can also be sent as a query parameter under the `api_key` key.

Similarly, a JWT can be sent in the `Authorization` header, after it has been returned by the login and/or OAuth processes. Aleph does not use session cookies or any other type of stateful API.

feat(aleph#Crossreferencing mentions with entities): Crossreferencing mentions with entities

[Mentions](https://docs.aleph.occrp.org/developers/explanation/cross-referencing/#mentions) are names of people or companies that Aleph automatically extracts from files you upload. Aleph includes mentions when cross-referencing a collection, but only in one direction.

Consider the following example:

- "Collection A" contains a file. The file mentions "John Doe".
- "Collection B" contains a Person entity named "John Doe".

If you cross-reference "Collection A", Aleph includes the mention of "John Doe" in the cross-referencing and will find a match for it in "Collection B".

However, if you cross-reference "Collection B", Aleph doesn't consider mentions when trying to find a match for the Person entity.

As long as you only want to compare the mentions in one specific collection against entities (but not mentions) in another collection, Aleph's cross-ref should be able to do that. If you want to compare entities in a specific collection against other entities and mentions in other collections, you will have to do that yourself.

If you have a limited number of collections, one option might be to fetch all mentions and automatically create entities for each mention using the API.

To fetch a list of mentions for a collection you can use the `/api/2/entities?filter:collection_id=137&filter:schemata=Mention` API request.
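A sketch of that request using `requests` (the instance URL, API key and collection id are placeholders; the endpoint and `ApiKey` header are the ones documented above, and I'm assuming the usual `results` envelope in the response):

```python
import requests

ALEPH_URL = "https://aleph.example.org"  # your instance
API_KEY = "your-api-key"  # from your profile page

response = requests.get(
    f"{ALEPH_URL}/api/2/entities",
    params={"filter:collection_id": 137, "filter:schemata": "Mention"},
    headers={"Authorization": f"ApiKey {API_KEY}", "Accept": "application/json"},
)
response.raise_for_status()

# Print each mention returned for the collection
for mention in response.json()["results"]:
    print(mention)
```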
feat(python_snippets#Send a linux desktop notification): Send a linux desktop notification

To show a Linux desktop notification from a Python script, you can use the `notify2` library (although [its last commit was in 2017](https://pypi.org/project/notify2/)). This library provides an easy way to send desktop notifications on Linux.

Alternatively, you can use the `subprocess` module to call the `notify-send` command-line utility directly. This is a more straightforward method but requires `notify-send` to be installed.

```python
import subprocess

def send_notification(title: str, message: str = "", urgency: str = "normal") -> None:
    """Send a desktop notification using notify-send.

    Args:
        title (str): The title of the notification.
        message (str): The message body of the notification. Defaults to an empty string.
        urgency (str): The urgency level of the notification. Can be 'low', 'normal', or 'critical'. Defaults to 'normal'.
    """
    subprocess.run(["notify-send", "-u", urgency, title, message])
```

feat(python_snippets#Get the error string): Get the error string

```python
import traceback

def cause_error():
    return 1 / 0  # This will raise a ZeroDivisionError

try:
    cause_error()
except Exception as error:
    # Capture the exception traceback as a string
    error_message = "".join(traceback.format_exception(None, error, error.__traceback__))
    print("An error occurred:\n", error_message)
```

feat(docker#Minify the images): Minify the images

[dive](https://github.com/wagoodman/dive) and [slim](https://github.com/slimtoolkit/slim) are two CLI tools you can use to optimise the size of your docker images.
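Their basic invocations, per each project's README (image name is a placeholder):

```bash
# Explore an image layer by layer to find wasted space
dive your-image:tag

# Build a minified version of an image (slim was formerly known as docker-slim)
slim build your-image:tag
```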
feat(espanso#Desktop application to add words easily): Desktop application to add words easily

Going into the espanso config files to add words is cumbersome. To make things easier you can use the `espansadd` Python script.

I'm going to assume that you have the following prerequisites:

- A Linux distribution with the i3 window manager installed.
- Python 3 installed.
- Espanso installed and configured.
- `ruyaml` and `tkinter` Python libraries installed.
- `notify-send` installed.
- Basic knowledge of editing configuration files in i3.

**Installation**

Create a new Python script named `espansadd.py` with the following content:

```python
import tkinter as tk
from tkinter import simpledialog
import traceback
import subprocess
import os
import sys

from ruyaml import YAML
from ruyaml.scanner import ScannerError

file_path = os.path.expanduser("~/.config/espanso/match/typofixer_overwrite.yml")

def append_to_yaml(file_path: str, trigger: str, replace: str) -> None:
    """Appends a new entry to the YAML file.

    Args:
        file_path (str): The file to append the new entry.
        trigger (str): The trigger string to be added.
        replace (str): The replacement string to be added.
    """
    # Define the new snippet
    new_entry = {
        "trigger": trigger,
        "replace": replace,
        "propagate_case": True,
        "word": True,
    }

    # Load the existing espanso matches file
    try:
        with open(os.path.expanduser(file_path), "r") as f:
            try:
                data = YAML().load(f)
            except ScannerError as e:
                send_notification(
                    f"Error parsing yaml of configuration file {file_path}",
                    f"{e.problem_mark}: {e.problem}",
                    "critical",
                )
                sys.exit(1)
    except FileNotFoundError:
        send_notification(
            f"Error opening the espanso file {file_path}", urgency="critical"
        )
        sys.exit(1)

    data["matches"].append(new_entry)

    # Write the updated data back to the file
    with open(os.path.expanduser(file_path), "w+") as f:
        yaml = YAML()
        yaml.default_flow_style = False
        yaml.dump(data, f)

def send_notification(title: str, message: str = "", urgency: str = "normal") -> None:
    """Send a desktop notification using notify-send.

    Args:
        title (str): The title of the notification.
        message (str): The message body of the notification. Defaults to an empty string.
        urgency (str): The urgency level of the notification. Can be 'low', 'normal', or 'critical'. Defaults to 'normal'.
    """
    subprocess.run(["notify-send", "-u", urgency, title, message])

def main() -> None:
    """Main function to prompt user for input and append to the YAML file."""
    # Create the main Tkinter window (it won't be shown)
    window = tk.Tk()
    window.withdraw()  # Hide the main window

    # Prompt the user for input
    trigger = simpledialog.askstring("Espanso add input", "Enter trigger:")
    replace = simpledialog.askstring("Espanso add input", "Enter replace:")

    # Check if both inputs were provided
    try:
        if trigger and replace:
            append_to_yaml(file_path, trigger, replace)
            send_notification("Espanso snippet added successfully")
        else:
            send_notification(
                "Both trigger and replace are required", urgency="critical"
            )
    except Exception as error:
        error_message = "".join(
            traceback.format_exception(None, error, error.__traceback__)
        )
        send_notification(
            "There was an unknown error adding the espanso entry",
            error_message,
            urgency="critical",
        )

if __name__ == "__main__":
    main()
```

Ensure the script has executable permissions. Run the following command:

```bash
chmod +x espansadd.py
```

To make the `espansadd` script easily accessible, we can configure a key binding in i3 to run the script. Open your i3 configuration file, typically located at `~/.config/i3/config` or `~/.i3/config`, and add the following lines:

```bash
bindsym $mod+Shift+e exec --no-startup-id /path/to/your/espansadd.py
```

Replace `/path/to/your/espansadd.py` with the actual path to your script.

If you also want the popup windows to be in floating mode, add:

```bash
for_window [title="Espanso add input"] floating enable
```

After editing the configuration file, reload i3 to apply the changes. You can do this by pressing `Mod` + `Shift` + `R` (where `Mod` is typically the `Super` or `Windows` key) or by running the following command:

```bash
i3-msg reload
```

**Usage**

Now that everything is set up, you can use the `espansadd` script by pressing `Mod` + `Shift` + `E`. This will open a dialog where you can enter the trigger and replacement text for the new Espanso snippet. After entering the information and pressing Enter, a notification will appear confirming the snippet has been added, or showing an error message if something went wrong.

feat(wireguard#Check the status of the tunnel): Check the status of the tunnel

One method is to ping between the VPN IP addresses or to run the `wg show` command from the server or from the client. Below you can see `wg show` output where the VPN is *not* up:

```bash
$: wg show
interface: wg0
  public key: qZ7+xNeXCjKdRNM33Diohj2Y/KSOXwvFfgTS1LRx+EE=
  private key: (hidden)
  listening port: 45703

peer: mhLzGkqD1JujPjEfZ6gkbusf3sfFzy+1KXBwVNBRBHs=
  endpoint: 3.133.147.235:51820
  allowed ips: 10.100.100.1/32
  transfer: 0 B received, 592 B sent
  persistent keepalive: every 21 seconds
```

The output below from the `wg show` command indicates the VPN link is up. See the line with `latest handshake`:

```bash
$: wg show
interface: wg0
  public key: qZ7+xNeXCjKdRNM33Diohj2Y/KSOXwvFfgTS1LRx+EE=
  private key: (hidden)
  listening port: 49785

peer: 6lf4SymMbY+WboI4jEsM+P9DhogzebSULrkFowDTt0M=
  endpoint: 3.133.147.235:51820
  allowed ips: 10.100.100.1/32
  latest handshake: 14 seconds ago
  transfer: 732 B received, 820 B sent
  persistent keepalive: every 21 seconds
```
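Two quick checks along those lines (the address is the peer VPN IP from the outputs above; `latest-handshakes` is a standard `wg show` sub-command):

```bash
# Ping the other end of the tunnel through its VPN address
ping -c 3 10.100.100.1

# Watch the last handshake timestamp per peer (epoch seconds)
watch -n 5 wg show wg0 latest-handshakes
```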
feat(org_rw#Change the default org-todo-keywords): Change the default org-todo-keywords

```python
from org_rw import loads

orig = '''* NEW_TODO_STATE First entry

* NEW_DONE_STATE Second entry'''
doc = loads(orig, environment={
    'org-todo-keywords': "NEW_TODO_STATE | NEW_DONE_STATE"
})
```