Skip to content

Commit

Permalink
char-rnn results
Browse files Browse the repository at this point in the history
  • Loading branch information
ppwwyyxx committed May 2, 2016
1 parent b059ce4 commit c939e0b
Show file tree
Hide file tree
Showing 2 changed files with 121 additions and 1 deletion.
117 changes: 117 additions & 0 deletions examples/char-rnn/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@

## Character-Level RNN Language Model

Generated Linux kernel source code sample:
```c
static int via_playback_set_interrupt(int action, void *data, int val)
{
struct fpga_type *val_control = ah->ctrl_reg;
u32 h, copy_ex, val, NULL, { inheri; /* base of one irda->naturee */
u32 argv, lva; /* pointer to the root before ISAN threshold has secondary */
u8 version = sample_get_array(ir, agent); /* hv0 AG record much */
unsigned long $avp; \
write_insert(page + (v & (unsigned long)__xad_serial_page(p) | __PAGE_ACCESSICY_HEADER__());
__ATTR(bp, 1);
__put_user(buff, pg_free);*/
DE_PAGE(mpt, ptr, ipvecs, i).
(__GFP_IGNORE_IPSEC_NO(PEED_HDR(sp)))
__pdesc->f_ofname[0].buf->ptr[(__UA_MINSN_CACHE_SIZE].icwsr);

extent.diff = bufsize;
if (hash.b_format->fs_is_data.uscq.fd_type == 1)
___asm__ __vparams_allocate(avp_event)
__PAGE_SIZE +
up_va(iov, p,
p);

lead = image;
union agp.hfc * iov_offset =
(path->iv_max_position == map.node_size);
_avg = cause_offset;
} *va;
__u8 reference;
__get_user_exit(f_rio_type);
}
static void decode_free(unsigned long aregs)
{
return invalidated_in_sync(fs_info->remote_data, i);
}

void init_vm_voltage_one(struct iagenga_ops *omap2_version,
irq_set_value_cachep(indio_dev, 0, 0, "%s]")
: "_enable is enabled as enabled on !errcomvs output width"
* is the compressable.
* Archite inserted in the exception from the more than is a POWER_UNIRQ1_IMM_READ if a
* provides precision at hope if any packet
* Word. All given key start of the subtract to using the name.
* Now restore it later in attribute files.
*/
u8 num_irq; /* aligned */
ap_present = obj_stray;
acb->owner = parent;
rw29.phy_flag(port, UDATA306, vid, irq,
V4L2_CID_BURST, ioc->name);
else if (pins->irq_ops.intel_stall_min) {
/* record the address off */
ret = ath79_ap_ready_wfire_bits(
ERRORS \
"isp110x->having/nlan in, position",
cpu);

case 0:
case 3:
case 4:
board = capsblrmin(timeout); box_x0 = 6;/* 1^2 vmu, number versalise */
v = pvt->pages_index;

ring->un.formats[i] = event->value;
input_assert_rate(udc);
} else if (bp_tested(chip)) {
tmp1 = ACPI_ASUS_REG_TX1;
/*
* LS pending packets, 4==+1 pad word < 0 VBE is enabled 1
* it's added and this will have any device. an evfn allocators after the
* functions already read the kernel.
*/
if (up)
break;
}

sg_u_state(report);
err = -ENOTSUPP == IEEE80211_HT_PASSIVE_IDLE;

if (!*apa_header)
set_hard_updates(padapter, 0, -1);

if (!keymap_lookup)
return PTR_ERR(status);

if (!urb)
goto fastpath;
if (offload_mapped(data)) {
mappine = security_make_key(dev, size, var_data->end);
udelay((u8 *)out, sizeof(nesadapter->membase));
}

dev_err(dev->dev, "i2c reset, uart_enabled from %d\n", pdev);
icounture_htc_get_reg(i2c, port);
}
```
Generated paper sample (trained on my personal folder of deep-learning papers, converted to a messy plain-text format by `pdftotext`):
```
sample shows the network is the convex optimization, second to construct an object. the second task
is not use the expensive experimental responses to originally bit can be replaced by a prior model
and many iterations over tt-fc(u15) exactly on imagenet (cell are obviously as; our approach, tomas
use is made or unrows a challenge on grow use advantage in our variant and gradient recognition at
the performance gradient when there is an input, as well as the calling operation at label averaging
or larger class. in particular for intel-dependent annotation, use of the cifar-10 we prior way to
estimate the loss function. we also work that max-pooling on computation. for example, can be done
shown by r0 . the entire way for each entry phase in convolutional layers until thin, updates , but
output direction to max of the jacobian. then the gradient vectors with negating: recurrent neural
networks has random ordering (piteria. neuropenous words), in oise, svhn). there is clearly solved.
for example, ya& generally [banau, khoderrell, potentially, from recurrent keypoints. brnn tape and
weston, improving neural grammatical model flow images is allows belief networks. neural
generating neural networks, there is not the initial particular marked pseudo-cameral rnns
sophett, pattern wlth designs for faster than the inference in deep learning. in nips (most),
```
5 changes: 4 additions & 1 deletion examples/char-rnn.py → examples/char-rnn/char-rnn.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ def __init__(self, input_file, size):
self._size = size
self.rng = get_rng(self)

logger.info("Loading corpus...")
# preprocess data
with open(input_file) as f:
data = f.read()
Expand All @@ -55,6 +56,7 @@ def __init__(self, input_file, size):
param.vocab_size = self.vocab_size
self.lut = LookUpTable(self.chars)
self.whole_seq = np.array(list(map(self.lut.get_idx, data)), dtype='int32')
logger.info("Corpus loaded. Vocab size: {}".format(self.vocab_size))

def reset_state(self):
self.rng = get_rng(self)
Expand Down Expand Up @@ -126,7 +128,7 @@ def get_config():
StatPrinter(),
ModelSaver(),
#HumanHyperParamSetter('learning_rate', 'hyper.txt')
SeduledHyperParamSetter('learning_rate', [(25, 2e-4)])
ScheduledHyperParamSetter('learning_rate', [(25, 2e-4)])
]),
model=Model(),
step_per_epoch=step_per_epoch,
Expand Down Expand Up @@ -194,6 +196,7 @@ def pick(prob):

if args.command == 'sample':
param.softmax_temprature = args.temperature
assert args.load is not None, "Load your model by argument --load"
sample(args.load, args.start, args.num)
sys.exit()
else:
Expand Down

0 comments on commit c939e0b

Please sign in to comment.