
Commit

chore: update dependencies
targos committed Aug 8, 2023
1 parent c5010ee commit f73f798
Showing 11 changed files with 51 additions and 50 deletions.
24 changes: 11 additions & 13 deletions package.json
@@ -27,29 +27,27 @@
"check-types": "tsc --noEmit",
"clean": "rimraf lib lib-esm",
"eslint": "eslint src --cache",
"eslint:fix": "npm run eslint -- --fix",
"eslint-fix": "npm run eslint -- --fix",
"prepack": "npm run tsc",
"prettier": "prettier --check src",
"prettier:fix": "prettier --write src",
"prettier-write": "prettier --write src",
"test": "npm run test-only && npm run eslint && npm run prettier && npm run check-types",
"test-only": "jest --coverage",
"tsc": "npm run clean && npm run tsc-cjs && npm run tsc-esm",
"tsc-cjs": "tsc --project tsconfig.cjs.json",
"tsc-esm": "tsc --project tsconfig.esm.json"
},
"devDependencies": {
"@types/jest": "^29.2.3",
"cheminfo-build": "^1.1.11",
"cheminfo-types": "^1.4.0",
"eslint": "^8.25.0",
"eslint-config-cheminfo-typescript": "^11.2.2",
"eslint-plugin-import": "^2.28.0",
"jest": "^29.3.1",
"prettier": "^2.7.1",
"ts-jest": "^29.0.3",
"typescript": "^4.9.3"
"@types/jest": "^29.5.3",
"cheminfo-types": "^1.7.2",
"eslint": "^8.46.0",
"eslint-config-cheminfo-typescript": "^12.0.4",
"jest": "^29.6.2",
"prettier": "^3.0.1",
"ts-jest": "^29.1.1",
"typescript": "^5.1.6"
},
"dependencies": {
"iobuffer": "^5.2.1"
"iobuffer": "^5.3.2"
}
}
2 changes: 1 addition & 1 deletion src/__tests__/attributeExists.test.ts
@@ -7,7 +7,7 @@ const pathFiles = `${__dirname}/files/`;
test('attributeExists', () => {
const data = readFileSync(`${pathFiles}P071.CDF`);

- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);
expect(reader.attributeExists('operator_name')).toBe(true);
expect(reader.attributeExists('operator_nameXX')).toBe(false);
});
2 changes: 1 addition & 1 deletion src/__tests__/dataVariableExists.test.ts
@@ -7,7 +7,7 @@ const pathFiles = `${__dirname}/files/`;
test('dataVariableExists', () => {
const data = readFileSync(`${pathFiles}P071.CDF`);

- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);
expect(reader.dataVariableExists('instrument_name')).toBe(true);
expect(reader.dataVariableExists('instrument_nameXX')).toBe(false);
});
2 changes: 1 addition & 1 deletion src/__tests__/getAttribute.test.ts
@@ -7,6 +7,6 @@ const pathFiles = `${__dirname}/files/`;
test('getAttribute', () => {
const data = readFileSync(`${pathFiles}P071.CDF`);

- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);
expect(reader.getAttribute('operator_name')).toBe('SC');
});
2 changes: 1 addition & 1 deletion src/__tests__/getDataVariableAsString.test.ts
@@ -7,7 +7,7 @@ const pathFiles = `${__dirname}/files/`;
test('getDataVariableAsString', () => {
const data = readFileSync(`${pathFiles}P071.CDF`);

- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);
expect(reader.getDataVariableAsString('instrument_name')).toBe(
'Gas Chromatograph',
);
28 changes: 14 additions & 14 deletions src/__tests__/index.test.ts
@@ -17,7 +17,7 @@ describe('Read file', () => {
// http://www.unidata.ucar.edu/software/netcdf/examples/madis-sao.cdl
const data = readFileSync(`${pathFiles}madis-sao.nc`);

- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);
expect(reader.version).toBe('classic format');
expect(reader.recordDimension).toStrictEqual({
length: 178,
@@ -94,14 +94,14 @@ describe('Read file', () => {

it('read non-record variable', () => {
const data = readFileSync(`${pathFiles}madis-sao.nc`);
- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);

expect(reader.getDataVariable('nStaticIds')[0]).toBe(145);
});

it('read 2 dimensional variable', () => {
const data = readFileSync(`${pathFiles}ichthyop.nc`);
- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);
expect(reader.getDataVariable('time')).toHaveLength(49);
expect(reader.getDataVariable('time')[0]).toBe(1547070300);
expect(reader.getDataVariable('lat')).toHaveLength(49);
@@ -112,21 +112,21 @@

it('read record variable with string', () => {
const data = readFileSync(`${pathFiles}madis-sao.nc`);
- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);

- let record = reader.getDataVariable('wmoId');
+ const record = reader.getDataVariable('wmoId');
expect(record[0]).toBe(71419);
expect(record[1]).toBe(71415);
expect(record[2]).toBe(71408);
});

it('read non-record variable with object', () => {
const data = readFileSync(`${pathFiles}madis-sao.nc`);
- let reader = new NetCDFReader(data);
- let variables = reader.variables;
+ const reader = new NetCDFReader(data);
+ const variables = reader.variables;

- let withString = reader.getDataVariable('staticIds');
- let withObject = reader.getDataVariable(variables[1]);
+ const withString = reader.getDataVariable('staticIds');
+ const withObject = reader.getDataVariable(variables[1]);
expect(withString[0]).toBe('W');
expect(withString[1]).toBe('A');
expect(withString[2]).toBe('F');
@@ -137,7 +137,7 @@

it('read non-existent variable string', () => {
const data = readFileSync(`${pathFiles}madis-sao.nc`);
- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);

expect(reader.getDataVariable.bind(reader, "n'importe quoi")).toThrow(
'Not a valid NetCDF v3.x file: variable not found',
@@ -146,21 +146,21 @@

it('read 64 bit offset file', () => {
const data = readFileSync(`${pathFiles}model1_md2.nc`);
- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);
expect(reader.version).toBe('64-bit offset format');
expect(reader.getDataVariable('cell_angular')[0]).toBe('a');
expect(reader.getDataVariable('cell_spatial')[0]).toBe('a');
});

it('read agilent hplc file file', () => {
const data = readFileSync(`${pathFiles}agilent_hplc.cdf`);
- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);

expect(reader.version).toBe('classic format');

- let variables = [];
+ const variables = [];

- for (let variable of reader.variables) {
+ for (const variable of reader.variables) {
const value = reader.getDataVariable(variable);
variables.push({ value, ...variable });
}
2 changes: 1 addition & 1 deletion src/__tests__/toString.test.ts
@@ -7,6 +7,6 @@ const pathFiles = `${__dirname}/files/`;
test('toString', () => {
const data = readFileSync(`${pathFiles}P071.CDF`);

- let reader = new NetCDFReader(data);
+ const reader = new NetCDFReader(data);
expect(reader.toString()).toMatchSnapshot();
});
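The test updates above only swap `let` for `const`; the NetCDFReader API itself is untouched. For reference, a minimal usage sketch consistent with those tests (assuming the library is consumed from the published `netcdfjs` package and that a local NetCDF-3 file such as `P071.CDF` exists — both names are illustrative):

```ts
// Minimal sketch, not part of the commit. Assumptions: the reader is imported
// from the 'netcdfjs' package and 'P071.CDF' is any NetCDF-3 file on disk.
import { readFileSync } from 'fs';
import { NetCDFReader } from 'netcdfjs';

const data = readFileSync('P071.CDF');
const reader = new NetCDFReader(data);

if (reader.attributeExists('operator_name')) {
  console.log(reader.getAttribute('operator_name')); // 'SC' in the test file
}
if (reader.dataVariableExists('instrument_name')) {
  console.log(reader.getDataVariableAsString('instrument_name'));
}
console.log(reader.toString()); // dimensions, global attributes and variables
```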
10 changes: 5 additions & 5 deletions src/data.ts
@@ -13,15 +13,15 @@ import { num2bytes, str2num, readType } from './types';
export function nonRecord(
buffer: IOBuffer,
variable: Header['variables'][number],
- ): ReturnType<typeof readType>[] {
+ ): Array<ReturnType<typeof readType>> {
// variable type
const type = str2num(variable.type);

// size of the data
const size = variable.size / num2bytes(type);

// iterates over the data
- let data = new Array(size);
+ const data = new Array(size);
for (let i = 0; i < size; i++) {
data[i] = readType(buffer, type, 1);
}
@@ -40,7 +40,7 @@ export function record(
buffer: IOBuffer,
variable: Header['variables'][number],
recordDimension: Header['recordDimension'],
- ): ReturnType<typeof readType>[] {
+ ): Array<ReturnType<typeof readType>> {
// variable type
const type = str2num(variable.type);
const width = variable.size ? variable.size / num2bytes(type) : 1;
@@ -50,11 +50,11 @@
const size = recordDimension.length;

// iterates over the data
- let data = new Array(size);
+ const data = new Array(size);
const step = recordDimension.recordStep;
if (step) {
for (let i = 0; i < size; i++) {
- let currentOffset = buffer.offset;
+ const currentOffset = buffer.offset;
data[i] = readType(buffer, type, width);
buffer.seek(currentOffset + step);
}
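The return-type edits in `nonRecord` and `record` above are purely syntactic: `T[]` and `Array<T>` describe the same type, with the generic form presumably preferred by the updated lint config when the element type is itself a complex expression. A small illustrative sketch (the `Numeric` alias is a stand-in, not part of the source):

```ts
// Illustrative only: both declarations denote the same array type.
type Numeric = number | number[]; // stand-in for ReturnType<typeof readType>

type Shorthand = Numeric[];
type Generic = Array<Numeric>;

const a: Shorthand = [1, [2, 3]];
const b: Generic = a; // assignable both ways: the two types are identical
```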
17 changes: 10 additions & 7 deletions src/header.ts
@@ -66,12 +66,12 @@ export function header(buffer: IOBuffer, version: number): Header {

export interface Dimensions {
/* that is an array of dimension object:*/
- dimensions: {
+ dimensions: Array<{
/* name of the dimension*/
name: string;
/* size of the dimension */
size: number;
- }[];
+ }>;
/* id of the dimension that has unlimited size or undefined,*/
recordId?: number;
/* name of the dimension that has unlimited size */
@@ -203,7 +203,10 @@ export interface Variable {
/* True if is a record variable, false otherwise (unlimited size) */
record: boolean;
}
- type Variables = { variables: Variable[]; recordStep: number };
+ interface Variables {
+   variables: Variable[];
+   recordStep: number;
+ }
/**
* @param buffer - Buffer for the file data
* @param recordId - Id of the unlimited dimension (also called record dimension)
@@ -233,22 +236,22 @@ function variablesList(
variables = new Array(variableSize);
for (let v = 0; v < variableSize; v++) {
// Read name
- let name = readName(buffer);
+ const name = readName(buffer);

// Read dimensionality of the variable
const dimensionality = buffer.readUint32();

// Index into the list of dimensions
- let dimensionsIds = new Array(dimensionality);
+ const dimensionsIds = new Array(dimensionality);
for (let dim = 0; dim < dimensionality; dim++) {
dimensionsIds[dim] = buffer.readUint32();
}

// Read variables size
- let attributes = attributesList(buffer);
+ const attributes = attributesList(buffer);

// Read type
- let type = buffer.readUint32();
+ const type = buffer.readUint32();
notNetcdf(type < 1 && type > 6, `non valid type ${type}`);

// Read variable size
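Likewise, swapping the inline `{...}[]` for `Array<{...}>` and the `Variables` type alias for an `interface` changes no shapes; it most likely just satisfies the stricter preset (possibly its `@typescript-eslint/consistent-type-definitions` rule). A sketch with hypothetical names:

```ts
// Illustrative only: a type alias and an interface describing the same shape.
type PointAlias = { x: number; y: number };

interface PointInterface {
  x: number;
  y: number;
}

const p: PointAlias = { x: 1, y: 2 };
const q: PointInterface = p; // structurally identical, so this type-checks
```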
10 changes: 5 additions & 5 deletions src/toString.ts
@@ -1,22 +1,22 @@
import { NetCDFReader } from './parser';

export function toString(this: NetCDFReader) {
- let result = [];
+ const result = [];
result.push('DIMENSIONS');
- for (let dimension of this.dimensions) {
+ for (const dimension of this.dimensions) {
result.push(` ${dimension.name.padEnd(30)} = size: ${dimension.size}`);
}

result.push('');
result.push('GLOBAL ATTRIBUTES');
- for (let attribute of this.globalAttributes) {
+ for (const attribute of this.globalAttributes) {
result.push(` ${attribute.name.padEnd(30)} = ${attribute.value}`);
}

- let variables = JSON.parse(JSON.stringify(this.variables));
+ const variables = JSON.parse(JSON.stringify(this.variables));
result.push('');
result.push('VARIABLES:');
- for (let variable of variables) {
+ for (const variable of variables) {
variable.value = this.getDataVariable(variable);
let stringify = JSON.stringify(variable.value);
if (stringify.length > 50) stringify = stringify.substring(0, 50);
2 changes: 1 addition & 1 deletion src/types.ts
@@ -93,7 +93,7 @@ function readNumber(
bufferReader: () => number,
): number | number[] {
if (size !== 1) {
- let numbers = new Array(size);
+ const numbers = new Array(size);
for (let i = 0; i < size; i++) {
numbers[i] = bufferReader();
}
