Skip to content
This repository

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse code

syntax errors, new tests

  • Loading branch information...
commit a623b810fe0e2ea4d808444e897c8cb888ea94c1 1 parent 0cce59d
cathywu authored

Showing 2 changed files with 37 additions and 37 deletions. Show diff stats Hide diff stats

  1. +2 −2 classifier.py
  2. +35 −35 movie.py
4 classifier.py
@@ -152,11 +152,11 @@ def vectorToString(self, vec, cls = False, binary=False):
152 152 # creates a string of the format "[class if point is labeled] feature1:value1 feature2:value2..."
153 153 # where the only allowed features are the ones in restrictFeatures, if we're restricting the features
154 154 if binary:
155   - return ((str(cls) + " ") if cls else "") +
  155 + return ((str(cls) + " ") if cls else "") + \
156 156 " ".join(["-".join(str(i).split()) + ":1"
157 157 for i in vec if (not self.restrictFeatures) or
158 158 (i in self.restrictFeatures)]) + "\n"
159   - return ((str(cls) + " ") if cls else "") +
  159 + return ((str(cls) + " ") if cls else "") + \
160 160 " ".join(["-".join(str(i).split()) + ":" + str(vec[i])
161 161 for i in vec if (not self.restrictFeatures) or
162 162 (i in self.restrictFeatures)]) + "\n"
70 movie.py
@@ -31,7 +31,7 @@ def __init__(self, clsf, n, ind, pos_dir, neg_dir, test_set=None,
31 31 self.limit = limit if limit else [0 for i in n]
32 32 self.clsf = clsf
33 33 self.idf = idf
34   - self.test_set = True if test_set else False
  34 + self.test_set = test_set
35 35 self.pos_dir = pos_dir
36 36 self.neg_dir = neg_dir
37 37
@@ -273,8 +273,8 @@ def test(classif, n=1, train_size=500, mode='k', iterations=1, dataset='',
273 273 # dataset='default',extra_dataset=1,limit=[16165],binary=True, idf=False)
274 274 #test(classifier.MaximumEntropyClassifier,n=[1],train_size=800,mode='k',iterations=3,
275 275 # dataset='default',extra_dataset=1,limit=[16165],binary=False, idf=False)
276   - test(classifier.LinearSVMClassifier,n=[1],train_size=800,mode='k',iterations=3,
277   - dataset='default',extra_dataset=1,limit=[16165],binary=True, idf=False)
  276 + #test(classifier.LinearSVMClassifier,n=[1],train_size=800,mode='k',iterations=3,
  277 + # dataset='default',extra_dataset=1,limit=[16165],binary=True, idf=False)
278 278 #
279 279 #test(classifier.BayesClassifier,n=[1],train_size=800,mode='k',iterations=3,
280 280 # dataset='default',extra_dataset=1,limit=None,binary=True, idf=False)
@@ -283,40 +283,40 @@ def test(classif, n=1, train_size=500, mode='k', iterations=1, dataset='',
283 283 #BORKED#test(classifier.LinearSVMClassifier,n=[1],train_size=800,mode='k',iterations=3,
284 284 #BORKED# dataset='default',extra_dataset=1,limit=None,binary=True, idf=False)
285 285
286   - test(classifier.BayesClassifier,n=[2],train_size=800,mode='k',iterations=3,
287   - dataset='default',extra_dataset=1,limit=[16165],binary=True, idf=False)
288   - test(classifier.MaximumEntropyClassifier,n=[2],train_size=800,mode='k',iterations=3,
289   - dataset='default',extra_dataset=1,limit=[16165],binary=False, idf=False)
290   - test(classifier.LinearSVMClassifier,n=[2],train_size=800,mode='k',iterations=3,
291   - dataset='default',extra_dataset=1,limit=[16165],binary=True, idf=False)
  286 + #test(classifier.BayesClassifier,n=[2],train_size=800,mode='k',iterations=3,
  287 + # dataset='default',extra_dataset=1,limit=[16165],binary=True, idf=False)
  288 + #test(classifier.MaximumEntropyClassifier,n=[2],train_size=800,mode='k',iterations=3,
  289 + # dataset='default',extra_dataset=1,limit=[16165],binary=False, idf=False)
  290 + #test(classifier.LinearSVMClassifier,n=[2],train_size=800,mode='k',iterations=3,
  291 + # dataset='default',extra_dataset=1,limit=[16165],binary=True, idf=False)
292 292
293   - test(classifier.BayesClassifier,n=[1,2],train_size=800,mode='k',iterations=3,
294   - dataset='default',extra_dataset=1,limit=[16165,16165],binary=True, idf=False)
295   - test(classifier.MaximumEntropyClassifier,n=[1,2],train_size=800,mode='k',iterations=3,
296   - dataset='default',extra_dataset=1,limit=[16165,16165],binary=False, idf=False)
297   - test(classifier.LinearSVMClassifier,n=[1,2],train_size=800,mode='k',iterations=3,
298   - dataset='default',extra_dataset=1,limit=[16165,16165],binary=True, idf=False)
  293 + #test(classifier.BayesClassifier,n=[1,2],train_size=800,mode='k',iterations=3,
  294 + # dataset='default',extra_dataset=1,limit=[16165,16165],binary=True, idf=False)
  295 + #test(classifier.MaximumEntropyClassifier,n=[1,2],train_size=800,mode='k',iterations=3,
  296 + # dataset='default',extra_dataset=1,limit=[16165,16165],binary=False, idf=False)
  297 + #test(classifier.LinearSVMClassifier,n=[1,2],train_size=800,mode='k',iterations=3,
  298 + # dataset='default',extra_dataset=1,limit=[16165,16165],binary=True, idf=False)
299 299
300   - test(classifier.BayesClassifier,n=[1],train_size=800,mode='k',iterations=3,
301   - dataset='partofspeech',extra_dataset=1,limit=None,binary=True, idf=False)
302   - test(classifier.MaximumEntropyClassifier,n=[1],train_size=800,mode='k',iterations=3,
303   - dataset='partofspeech',extra_dataset=1,limit=None,binary=False, idf=False)
304   - test(classifier.LinearSVMClassifier,n=[1],train_size=800,mode='k',iterations=3,
305   - dataset='partofspeech',extra_dataset=1,limit=None,binary=True, idf=False)
306   -
307   - test(classifier.BayesClassifier,n=[1],train_size=800,mode='k',iterations=3,
308   - dataset='adjectives',extra_dataset=1,limit=None,binary=True, idf=False)
309   - test(classifier.MaximumEntropyClassifier,n=[1],train_size=800,mode='k',iterations=3,
310   - dataset='adjectives',extra_dataset=1,limit=None,binary=False, idf=False)
311   - test(classifier.LinearSVMClassifier,n=[1],train_size=800,mode='k',iterations=3,
312   - dataset='adjectives',extra_dataset=1,limit=None,binary=True, idf=False)
313   -
314   - test(classifier.BayesClassifier,n=[1],train_size=800,mode='k',iterations=3,
315   - dataset='position',extra_dataset=1,limit=None,binary=True, idf=False)
316   - test(classifier.MaximumEntropyClassifier,n=[1],train_size=800,mode='k',iterations=3,
317   - dataset='position',extra_dataset=1,limit=None,binary=False, idf=False)
318   - test(classifier.LinearSVMClassifier,n=[1],train_size=800,mode='k',iterations=3,
319   - dataset='position',extra_dataset=1,limit=None,binary=True, idf=False)
  300 + #test(classifier.BayesClassifier,n=[1],train_size=800,mode='k',iterations=3,
  301 + # dataset='partofspeech',extra_dataset=1,limit=None,binary=True, idf=False)
  302 + #test(classifier.MaximumEntropyClassifier,n=[1],train_size=800,mode='k',iterations=3,
  303 + # dataset='partofspeech',extra_dataset=1,limit=None,binary=False, idf=False)
  304 + #BORKED#test(classifier.LinearSVMClassifier,n=[1],train_size=800,mode='k',iterations=3,
  305 + #BORKED# dataset='partofspeech',extra_dataset=1,limit=None,binary=True, idf=False)
  306 +
  307 + #test(classifier.BayesClassifier,n=[1],train_size=800,mode='k',iterations=3,
  308 + # dataset='adjectives',extra_dataset=1,limit=None,binary=True, idf=False)
  309 + #test(classifier.MaximumEntropyClassifier,n=[1],train_size=800,mode='k',iterations=3,
  310 + # dataset='adjectives',extra_dataset=1,limit=None,binary=False, idf=False)
  311 + #test(classifier.LinearSVMClassifier,n=[1],train_size=800,mode='k',iterations=3,
  312 + # dataset='adjectives',extra_dataset=1,limit=None,binary=True, idf=False)
  313 +
  314 + #test(classifier.BayesClassifier,n=[1],train_size=800,mode='k',iterations=3,
  315 + # dataset='position',extra_dataset=1,limit=None,binary=True, idf=False)
  316 + #test(classifier.MaximumEntropyClassifier,n=[1],train_size=800,mode='k',iterations=3,
  317 + # dataset='position',extra_dataset=1,limit=None,binary=False, idf=False)
  318 + #BORKED#test(classifier.LinearSVMClassifier,n=[1],train_size=800,mode='k',iterations=3,
  319 + #BORKED# dataset='position',extra_dataset=1,limit=None,binary=True, idf=False)
320 320
321 321 #mvc = MajorityVotingTester()
322 322 #ind = Indexes(mode='k',iterations=3,train_size=800)

0 comments on commit a623b81

Please sign in to comment.
Something went wrong with that request. Please try again.