enhance: dependencyHash Calculation algorithm
debanjandhar12 committed Jul 10, 2022
1 parent dbf158c commit 67553bd
Showing 3 changed files with 57 additions and 28 deletions.
22 changes: 14 additions & 8 deletions src/converter/getContentDirectDependencies.ts
@@ -1,24 +1,30 @@
 import { BlockUUID } from "@logseq/libs/dist/LSPlugin";
 import { LOGSEQ_BLOCK_REF_REGEXP, LOGSEQ_EMBDED_PAGE_REGEXP, LOGSEQ_EMBDED_BLOCK_REGEXP } from "../constants";
 
-export default function getContentDirectDependencies(content: string, format: string = "markdown"): (BlockUUID | PageEntityName)[] {
+export type ReferenceDependency = {
+    type: "Embedded_Page_ref" | "Block_ref" | "Embedded_Block_ref",
+    value: BlockUUID | PageEntityName
+}
+export default function getContentDirectDependencies(content: string, format: string = "markdown"): ReferenceDependency[] {
     if(content == null || content == undefined) return [];
-    let blockDependency: Set<BlockUUID> = new Set();
-    let pageDependency: Set<PageEntityName> = new Set();
+    let blockRefDependency: Set<BlockUUID> = new Set();
+    let blockEmbededDependency: Set<BlockUUID> = new Set();
+    let pageEmbededDependency: Set<PageEntityName> = new Set();
     // Add dependencies due to LOGSEQ_EMBDED_BLOCK_REGEXP
     let match;
     while (match = LOGSEQ_EMBDED_BLOCK_REGEXP.exec(content)) {
-        blockDependency.add(match[1]);
+        blockEmbededDependency.add(match[1]);
     }
     // Add dependencies due to LOGSEQ_BLOCK_REF_REGEXP
     while (match = LOGSEQ_BLOCK_REF_REGEXP.exec(content)) {
-        blockDependency.add(match[1]);
+        blockRefDependency.add(match[1]);
     }
     // Add dependencies due to LOGSEQ_EMBDED_PAGE_REGEXP
     while (match = LOGSEQ_EMBDED_PAGE_REGEXP.exec(content)) {
-        pageDependency.add(new PageEntityName(match[1]));
+        pageEmbededDependency.add(new PageEntityName(match[1]));
     }
-    return [...blockDependency, ...pageDependency];
+    return [...Array.from(blockRefDependency).map(block => ({ type: "Block_ref", value: block } as ReferenceDependency)),
+        ...Array.from(blockEmbededDependency).map(block => ({ type: "Embedded_Block_ref", value: block } as ReferenceDependency)),
+        ...Array.from(pageEmbededDependency).map(page => ({ type: "Embedded_Page_ref", value: page } as ReferenceDependency))];
 }
 
 export class PageEntityName {
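
For context, a minimal sketch of how the reworked return value could be consumed. The import path, sample content, and UUID are made up for illustration, and the ((uuid)) / {{embed [[Page]]}} syntax is assumed to be what LOGSEQ_BLOCK_REF_REGEXP and LOGSEQ_EMBDED_PAGE_REGEXP match:

```typescript
import getContentDirectDependencies, { ReferenceDependency } from "./converter/getContentDirectDependencies";

// Hypothetical block content with one plain block reference and one page embed.
const content = "Some text ((64aa3b72-0000-0000-0000-000000000001)) and {{embed [[Algorithms]]}}";

const deps: ReferenceDependency[] = getContentDirectDependencies(content, "markdown");

// Each dependency now carries its kind, so callers can treat plain refs
// differently from embeds (which is what Note.getAllDependenciesHash does below).
for (const dep of deps) {
    console.log(dep.type, dep.value); // e.g. "Block_ref 64aa3b72-..." and an "Embedded_Page_ref" with a PageEntityName value
}
```
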
5 changes: 3 additions & 2 deletions src/notes/MultilineCardNote.ts
@@ -6,6 +6,7 @@ import { safeReplace } from '../utils';
 import { ANKI_CLOZE_REGEXP, MD_PROPERTIES_REGEXP } from "../constants";
 import { SyncronizedLogseq } from "../SyncronizedLogseq";
 import { BlockUUID } from "@logseq/libs/dist/LSPlugin.user";
+import { ReferenceDependency } from "../converter/getContentDirectDependencies";
 
 export class MultilineCardNote extends Note {
     public type: string = "multiline_card";
@@ -151,7 +152,7 @@ export class MultilineCardNote extends Note {
         return blocks;
     }
 
-    public getDirectDeendencies(): BlockUUID[] {
+    public getDirectDeendencies(): ReferenceDependency[] {
         function getChildrenUUID(children: any): BlockUUID[] {
             let result = [];
             for (let child of children) {
@@ -160,6 +161,6 @@ export class MultilineCardNote extends Note {
             }
             return result;
         }
-        return [this.uuid,...getChildrenUUID(this.children)];
+        return [this.uuid,...getChildrenUUID(this.children)].map(block => ({ type: "Embedded_Block_ref", value: block } as ReferenceDependency));
     }
 }
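
MultilineCardNote now tags every dependency it reports. A self-contained sketch of just that mapping, with placeholder UUIDs standing in for this.uuid and the values collected by getChildrenUUID() (the type is copied from the file above):

```typescript
type ReferenceDependency = {
    type: "Embedded_Page_ref" | "Block_ref" | "Embedded_Block_ref",
    value: string // BlockUUID or page name in the real code
};

// Placeholders for this.uuid and the child block UUIDs of a multiline card.
const uuids = ["uuid-of-card-block", "uuid-of-child-1", "uuid-of-child-2"];

// Same mapping as the new getDirectDeendencies(): every block of a multiline
// card is treated as an embedded-block dependency, so its full content
// (not just its first line) feeds into the dependency hash.
const deps: ReferenceDependency[] = uuids.map(
    block => ({ type: "Embedded_Block_ref", value: block })
);
console.log(deps);
```
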
58 changes: 40 additions & 18 deletions src/notes/Note.ts
@@ -1,12 +1,13 @@
 import '@logseq/libs'
 import { LazyAnkiNoteManager } from '../anki-connect/LazyAnkiNoteManager';
-import _ from 'lodash';
+import _, { replace } from 'lodash';
 import { HTMLFile } from '../converter/Converter';
 import { BlockUUID } from '@logseq/libs/dist/LSPlugin.user';
-import getContentDirectDependencies, { PageEntityName } from '../converter/getContentDirectDependencies';
+import getContentDirectDependencies, { PageEntityName, ReferenceDependency } from '../converter/getContentDirectDependencies';
 import { SyncronizedLogseq } from '../SyncronizedLogseq';
 import pkg from '../../package.json';
 import hashSum from 'hash-sum';
+import { MD_PROPERTIES_REGEXP, ORG_PROPERTIES_REGEXP } from '../constants';
 
 export abstract class Note {
     public uuid: string;
@@ -45,39 +46,60 @@ export abstract class Note {
         return this.ankiId;
     }
 
-    public getDirectDeendencies(): BlockUUID[] {
-        return [this.uuid];
+    public getDirectDeendencies(): ReferenceDependency[] {
+        return [this.uuid].map(block => ({ type: "Embedded_Block_ref", value: block } as ReferenceDependency));
     }
 
     public async getAllDependenciesHash(additionalDependencies = []): Promise<string> {
         let toHash = [...additionalDependencies];
-        let blockDependencies : Set<BlockUUID> | BlockUUID[] = new Set<BlockUUID>();
-        let pageDependencies : Set<PageEntityName> | PageEntityName[] = new Set<PageEntityName>();
+        let blockRefDependencies : Set<BlockUUID> = new Set<BlockUUID>();
+        let blockEmbededDependencies : Set<BlockUUID> = new Set<BlockUUID>();
+        let pageEmbededDependencies : Set<PageEntityName> = new Set<PageEntityName>();
 
         // DFS to get all dependencies
-        let stack : (BlockUUID | PageEntityName)[] = this.getDirectDeendencies();
+        let stack : ReferenceDependency[] = this.getDirectDeendencies();
         let parentID = (await SyncronizedLogseq.Editor.getBlock(this.uuid)).parent.id;
         let parent;
         while ((parent = await SyncronizedLogseq.Editor.getBlock(parentID)) != null) {
             stack.push(parent.uuid["$uuid$"] || parent.uuid.Wd || parent.uuid);
             parentID = parent.parent.id;
         }
         while (stack.length > 0) {
-            if(stack.at(-1) instanceof PageEntityName) {pageDependencies.add(stack.pop() as PageEntityName); continue;}
-            let uuid = stack.pop() as BlockUUID;
-            if (blockDependencies.has(uuid)) continue;
-            blockDependencies.add(uuid);
-            let block = await SyncronizedLogseq.Editor.getBlock(uuid);
-            stack.push(...getContentDirectDependencies(_.get(block, 'content',''), _.get(block, 'format','')));
+            let dependency = stack.pop();
+            if(dependency.type == "Embedded_Block_ref") {
+                if(blockEmbededDependencies.has(dependency.value as BlockUUID)) continue;
+                blockEmbededDependencies.add(dependency.value as BlockUUID);
+                let block = await SyncronizedLogseq.Editor.getBlock(dependency.value as BlockUUID);
+                stack.push(...getContentDirectDependencies(_.get(block, 'content',''), _.get(block, 'format','')));
+            }
+            else if(dependency.type == "Block_ref") {
+                if(blockEmbededDependencies.has(dependency.value as BlockUUID) || blockRefDependencies.has(dependency.value as BlockUUID)) continue;
+                blockRefDependencies.add(dependency.value as BlockUUID);
+                let block = await SyncronizedLogseq.Editor.getBlock(dependency.value as BlockUUID);
+                let block_content = _.get(block, 'content','');
+                block_content = replace(block_content, MD_PROPERTIES_REGEXP, "");
+                block_content = replace(block_content, ORG_PROPERTIES_REGEXP, "");
+                let block_content_first_line = block_content.split("\n").find(line => line.trim() != "");
+                stack.push(...getContentDirectDependencies(block_content_first_line, _.get(block, 'format','')));
+            }
+            else if(dependency.type == "Embedded_Page_ref") {
+                pageEmbededDependencies.add(dependency.value as PageEntityName);
+            }
         }
-        blockDependencies = _.sortBy(Array.from(blockDependencies));
-
-        for (let uuid of blockDependencies) {
+        for (let uuid of blockEmbededDependencies) {
             let block = await SyncronizedLogseq.Editor.getBlock(uuid);
             toHash.push({content:_.get(block, 'content',''), format:_.get(block, 'format','markdown'), parent:_.get(block, 'parent.id',''), left:_.get(block, 'left.id','')});
         }
-        pageDependencies = _.sortBy(Array.from(pageDependencies));
-        for (let PageEntityName of pageDependencies) {
+        for (let uuid of blockRefDependencies) {
+            if(blockEmbededDependencies.has(uuid)) continue;
+            let block = await SyncronizedLogseq.Editor.getBlock(uuid);
+            let block_content = _.get(block, 'content','');
+            block_content = replace(block_content, MD_PROPERTIES_REGEXP, "");
+            block_content = replace(block_content, ORG_PROPERTIES_REGEXP, "");
+            let block_content_first_line = block_content.split("\n").find(line => line.trim() != "");
+            toHash.push({content:block_content_first_line, format:_.get(block, 'format','markdown'), parent:_.get(block, 'parent.id',''), left:_.get(block, 'left.id','')});
+        }
+        for (let PageEntityName of pageEmbededDependencies) {
             let page = await SyncronizedLogseq.Editor.getPage(PageEntityName.name);
             toHash.push({content:_.get(page, 'updatedAt','')});
         }
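
In plain terms, the reworked hash treats the three dependency kinds differently: an embedded block contributes its full content and position, a plain block reference contributes only its first non-empty line after properties are stripped (only that line is rendered for a ref), and an embedded page contributes its updatedAt timestamp. A rough standalone sketch of that idea using the hash-sum package already imported above; the sample payloads are invented, and the final hashSum call stands in for whatever the method does with toHash outside the shown hunk:

```typescript
import hashSum from 'hash-sum';

const toHash: any[] = [];

// Embedded block: full content plus position info affects the hash.
toHash.push({ content: "Every line\nof the embedded block\ncounts", format: "markdown", parent: 42, left: 41 });

// Plain block ref: only the first non-empty line is hashed (the real code
// strips MD/ORG properties with MD_PROPERTIES_REGEXP / ORG_PROPERTIES_REGEXP first).
const refContent = "First visible line\nlater lines do not matter for a plain ref";
const firstLine = refContent.split("\n").find(line => line.trim() != "");
toHash.push({ content: firstLine, format: "markdown", parent: 7, left: 6 });

// Embedded page: only the page's updatedAt timestamp is hashed.
toHash.push({ content: 1657411200000 });

console.log(hashSum(toHash)); // changes only when one of the inputs above changes
```
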
