Permalink
Browse files

Renamed isPageOK() to isOK(), added an isOK() validity check when loading pages from cache,

and moved the TreeModel visual-update calls into TreeNodePageWrapper
  • Loading branch information...
1 parent e6088b2 commit 297e27ee27d67c60e0e4253f079d2fee4d10b07b Acerbic committed Oct 6, 2012
View
@@ -16,7 +16,6 @@
import dloader.gui.MyWorker;
import dloader.gui.TreeNodePageWrapper;
import dloader.page.AbstractPage;
-import dloader.page.Track;
import javax.swing.tree.*;
@@ -109,7 +108,7 @@ public void actionPerformed(ActionEvent e) {
// tree.setCellRenderer(new MyRenderer());
tree.setBorder(new BevelBorder(BevelBorder.LOWERED, null, null, null, null));
- lblStatus = new JLabel("kkkkss");
+ lblStatus = new JLabel("Status messages");
lblStatus.setHorizontalAlignment(SwingConstants.CENTER);
lblStatus.setFont(new Font("Tahoma", Font.PLAIN, 18));
@@ -207,17 +206,15 @@ public void init() {
textFieldDirectory.setText(Main.saveTo);
chckbxLog.setSelected( Main.logger != null);
chckbxUseCache.setSelected(Main.allowFromCache);
- textFieldURL.setText(AbstractPage.fixURLString(null, Main.baseURL));
try {
rootPage = AbstractPage.bakeAPage(null, Main.baseURL, Main.saveTo, null);
+ textFieldURL.setText(rootPage.url.toString());
} catch (IllegalArgumentException e) {
return;
}
lblStatus.setText("Preparing");
- TreeNodePageWrapper x = new TreeNodePageWrapper(null); // proxy null root for better display
- x.add(new TreeNodePageWrapper(rootPage));
- ((DefaultTreeModel) tree.getModel()).setRoot(x);
+ setRootNodeForRootPage();
initPrefetch();
}
@@ -230,16 +227,27 @@ private void initPrefetch() {
theWorker = new MyWorker(rootPage, JobType.READCACHEPAGES);
theWorker.execute();
btnPrefetch.setEnabled(false);
+ btnFetch.setEnabled(false);
+ btnFix.setEnabled(false);
+ btnRetag.setEnabled(false);
+ btnUpdate.setEnabled(false);
}
}
private void finishPrefetch() {
if (lblStatus.getText().equals("Prefetching"))
lblStatus.setText("");
btnPrefetch.setEnabled(true);
+ btnPrefetch.setEnabled(true);
+ btnFetch.setEnabled(true);
+ btnFix.setEnabled(true);
+ btnRetag.setEnabled(true);
+ btnUpdate.setEnabled(true);
theWorker = null;
unfoldFirst();
+ // Commented out for later; currently in testing mode.
+// initScan();
}
/**
@@ -286,15 +294,20 @@ private void reInit() {
}
if (newRootPage != null) {
rootPage = newRootPage;
+ setRootNodeForRootPage();
- TreeNodePageWrapper x = new TreeNodePageWrapper(null); // proxy null root for better display
- x.add(new TreeNodePageWrapper(rootPage));
- ((DefaultTreeModel) tree.getModel()).setRoot(x);
initPrefetch();
btnPrefetch.setEnabled(false);
}
}
+ private void setRootNodeForRootPage() {
+ DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
+ TreeNodePageWrapper x = new TreeNodePageWrapper(null, model); // proxy null root for better display
+ x.add(new TreeNodePageWrapper(rootPage, model));
+ model.setRoot(x);
+
+ }
/**
* Captures SwingWorker finish jobs event
* @param root - root job for the work in question (not used atm)
@@ -343,7 +356,7 @@ else if (!p.getParent().childPages.contains(p))
// currentPage's node was not found in parent node
// add new item under this parent
- childNode = new TreeNodePageWrapper(childPage);
+ childNode = new TreeNodePageWrapper(childPage, model);
if (childPage.getParent() == null)
parentNode.add(childNode); // to the end
@@ -358,7 +371,7 @@ else if (!p.getParent().childPages.contains(p))
// new item's children if any (that way they maintain their order)
for (AbstractPage subPage: childPage.childPages) {
- TreeNodePageWrapper subChild = new TreeNodePageWrapper(subPage);
+ TreeNodePageWrapper subChild = new TreeNodePageWrapper(subPage, model);
childNode.add(subChild);
}
@@ -387,17 +400,10 @@ else if (!p.getParent().childPages.contains(p))
unfoldFirst();
// pass message to the user object and refresh its visual if needed
- // XXX: consider using EventListener mechanic
- if (parentNode.update(message, value)) {
- model.nodeChanged(parentNode);
- if (parentNode.page instanceof Track) {
- TreeNodePageWrapper gParentNode = (TreeNodePageWrapper) parentNode.getParent();
- if (gParentNode != null) {
- gParentNode.kidChanged(parentNode, gParentNode, message, value);
- model.nodeChanged(gParentNode);
- }
- }
- }
+ parentNode.update(message, value);
+// if (parentNode.update(message, value)) {
+// model.nodeChanged(parentNode);
+// }
}
@@ -3,6 +3,8 @@
import java.util.Enumeration;
import javax.swing.tree.DefaultMutableTreeNode;
+import javax.swing.tree.DefaultTreeModel;
+import javax.swing.tree.TreeModel;
import dloader.page.AbstractPage;
import dloader.page.Track;
@@ -24,6 +26,7 @@
private static final long serialVersionUID = -265090747493368344L;
public final AbstractPage page; //wrapped object
+ public final DefaultTreeModel model; //backref to model
// TODO job progress flags and logs
boolean readFromCache = false;
@@ -36,20 +39,21 @@
int kidsInProcessing = 0;
- public TreeNodePageWrapper(AbstractPage page) {
+ public TreeNodePageWrapper(AbstractPage page, TreeModel treeModel) {
super(null);
+ this.model = (DefaultTreeModel) treeModel;
this.page = page;
}
/**
* Update flags and states of this node visual representation
- * @param type
- * @param report
+ * @param message
+ * @param value
* @return true if node must be repainted
*/
- public boolean update(String type, long report) {
+ public boolean update(String message, long value) {
boolean updateVisuals = false;
- switch (type) {
+ switch (message) {
//messages reported by ReadCacheJob and GetPageJob:
case "checking cache": break;
case "read from cache":
@@ -95,6 +99,16 @@ public boolean update(String type, long report) {
*/
}
+
+ if (page instanceof Track) {
+ TreeNodePageWrapper parentNode = (TreeNodePageWrapper) getParent();
+ if (parentNode != null) {
+ parentNode.kidChanged(this, message, value);
+ model.nodeChanged(parentNode);
+ }
+ }
+ if (updateVisuals)
+ model.nodeChanged(this);
return updateVisuals;
}
@@ -126,13 +140,13 @@ public String toString() {
page.childPages.size()+"]</span>";
if (downloading) {
- title = "Scanning... " + title;
+ title = title + " (Scanning...)";
}
else if (downloadPageFailed) {
title = "Scan failed: " + title;
styleCompilation += "span#title {font: bold}";
} else if (downloadPageQ) {
- title = "In queue for scan... " + title;
+ title = title + " (In queue for scan...)";
}
// finalize title
@@ -153,12 +167,11 @@ else if (downloadPageFailed) {
"</u>"+ bottom;
}
- public void kidChanged(TreeNodePageWrapper kidWrapper,
- DefaultMutableTreeNode thisNode, String message, long value) {
+ public void kidChanged(TreeNodePageWrapper kidWrapper, String message, long value) {
kidsInProcessing = 0;
for (@SuppressWarnings("unchecked")
- Enumeration<DefaultMutableTreeNode> children = thisNode.children(); children.hasMoreElements();) {
+ Enumeration<DefaultMutableTreeNode> children = children(); children.hasMoreElements();) {
TreeNodePageWrapper kid = (TreeNodePageWrapper) children.nextElement();
if (kid.downloading || kid.downloadPageQ)
@@ -87,7 +87,6 @@
* Due to efficiency issues, elements should be added as .addAll(...) when possible
*/
public final
-// Queue<AbstractPage> childPages = new ConcurrentLinkedQueue<>();
List<AbstractPage> childPages = new CopyOnWriteArrayList<>();
@@ -121,10 +120,6 @@ public AbstractPage(String stringURL, String saveTo, AbstractPage parent) throws
catch (MalformedURLException e) {throw new IllegalArgumentException(e);}
catch (NullPointerException e) {throw new IllegalArgumentException(e);}
- //path in question may not exist at this point and will be created by parent page later.
-// Path p = Paths.get(saveTo);
-// if (! (Files.isDirectory(p) && Files.isWritable(p)))
-// throw new IllegalArgumentException();
this.saveTo = saveTo;
this.parent = parent;
}
@@ -161,6 +156,7 @@ String getFSSafeName(String name) throws IOException {
return name;
}
+ // TODO: this assumes children type is fixed and controlled by parent type, but it may not hold true, i.e. Track within Discography without intermediary album
/**
* Returns an XPath string to get links to children pages
* from current page. All tags in the path are in "pre" namespace.
@@ -591,7 +587,7 @@ public AbstractPage getChildByURLString(String string) {
*/
//XXX: maybe should be complemented with "lastUpdated" time stamp.
public
- boolean isPageOK() {
+ boolean isOK() {
if (getTitle()==null || getTitle().isEmpty())
return false;
return true;
@@ -163,7 +163,7 @@ boolean isSavingNotRequired() {
}
@Override
- public boolean isPageOK() {
- return super.isPageOK() && (coverUrl != null);
+ public boolean isOK() {
+ return super.isOK() && (coverUrl != null);
}
}
@@ -351,8 +351,8 @@ String getTrackFileName() {
}
@Override
- public boolean isPageOK() {
- if (! super.isPageOK() ) return false;
+ public boolean isOK() {
+ if (! super.isOK() ) return false;
for (String p: XMLCacheDataKeys)
if (getProperty(p) == null)
@@ -7,7 +7,7 @@
* Job to read page data from cache and if cache is unavailable - download.
* Starts the same jobs for children nodes.
*
- * NOTE the "package-default" visibility on class, as it is used only be other class - DownloadPage.
+ * NOTE the "package-default" visibility on this class, as it is used only by another class - UpdatePageJob.
* @author Acerbic
*
*/
@@ -26,12 +26,12 @@ public GetPageJob(AbstractPage page, JobMaster owner) {
@Override
public void run() {
report ("checking cache", 1);
- if (page.loadFromCache()) {
+ if (page.loadFromCache() && page.isOK()) {
//note: this iterator does not require locking because of CopyOnWriteArrayList implementation
for (AbstractPage child: page.childPages)
jobMaster.submit(new GetPageJob(child,jobMaster));
report("read from cache", 1);
- } else {
+ } else {
report("cache reading failed, submitting download job", 1);
jobMaster.submit(new UpdatePageJob(page,jobMaster, false));
}
@@ -47,11 +47,6 @@ public void run() {
else
report("cache reading failed", 1);
try {
-// try {
-// Thread.sleep(3000);
-// } catch (InterruptedException e) {
-// e.printStackTrace();
-// }
if (page.updateFromNet(this) || forceDownload) {
page.saveToCache();
//note: this iterator does not require locking because of CopyOnWriteArrayList implementation
@@ -46,19 +46,19 @@ public void tearDown() throws Exception {
public void testCaching() {
Main.cache = new XMLCache("test/pages_scan_cache.xml");
Discography page = new Discography("http://homestuck.bandcamp.com",null,null);
- assertFalse(page.isPageOK());
+ assertFalse(page.isOK());
assertTrue(page.loadFromCache());
- assertTrue(page.isPageOK());
+ assertTrue(page.isOK());
assertEquals("Homestuck", page.getTitle());
assertEquals(15, page.childPages.size());
Main.cache = new XMLCache("test/nullCache.xml"); // not existing file (new cache)
page.saveToCache();
Discography page2 = new Discography("http://homestuck.bandcamp.com",null,null);
- assertFalse(page2.isPageOK());
+ assertFalse(page2.isOK());
assertTrue(page2.loadFromCache());
- assertTrue(page2.isPageOK());
+ assertTrue(page2.isOK());
assertEquals("Homestuck", page2.getTitle());
assertEquals(15, page2.childPages.size());
@@ -75,23 +75,23 @@ public void testUpdating() {
fail("can't load from file");
}
assertNotNull(page.getChildByURLString("file:/album/homestuck-vol-9")); //the way relative url is resolved
- assertTrue(page.isPageOK());
+ assertTrue(page.isOK());
assertEquals("Homestuck", page.getTitle());
assertEquals(20, page.childPages.size());
try {
assertFalse(page.updateFromNet(pr));
} catch (ProblemsReadingDocumentException e) {
fail("can't load from file");
}
- assertTrue(page.isPageOK());
+ assertTrue(page.isOK());
page.childPages.clear(); // now page is different.
try {
assertTrue(page.updateFromNet(pr));
} catch (ProblemsReadingDocumentException e) {
fail("can't load from file");
}
- assertTrue(page.isPageOK());
+ assertTrue(page.isOK());
}
@Test

0 comments on commit 297e27e

Please sign in to comment.