Skip to content
Permalink

Comparing changes

Choose two branches to see what's changed or to start a new pull request. If you need to, you can also compare across forks or learn more about diff comparisons.

Open a pull request

Create a new pull request by comparing changes across two branches. If you need to, you can also compare across forks. Learn more about diff comparisons here.
base repository: beyondstorage/setup-hdfs
Failed to load repositories. Confirm that selected base ref is valid, then try again.
Loading
base: master
Choose a base ref
...
head repository: matrixorigin/setup-hdfs
Failed to load repositories. Confirm that selected head ref is valid, then try again.
Loading
compare: master
Choose a head ref
Able to merge. These branches can be automatically merged.
  • 9 commits
  • 4 files changed
  • 1 contributor

Commits on Feb 25, 2025

  1. Update shell script

    guguducken authored Feb 25, 2025
    Copy the full SHA
    f31930c View commit details

Commits on Feb 26, 2025

  1. update dist js

    guguducken committed Feb 26, 2025
    Copy the full SHA
    b86ec3c View commit details
  2. use url instead of version

    guguducken committed Feb 26, 2025
    Copy the full SHA
    0edbe67 View commit details
  3. not use async exec

    guguducken committed Feb 26, 2025
    Copy the full SHA
    751fbc2 View commit details
  4. Revert "not use async exec"

    This reverts commit 751fbc2.
    guguducken committed Feb 26, 2025
    Copy the full SHA
    5b08109 View commit details
  5. use promisify

    guguducken committed Feb 26, 2025
    Copy the full SHA
    788245d View commit details
  6. build to dist

    guguducken committed Feb 26, 2025
    Copy the full SHA
    2b813db View commit details

Commits on Feb 27, 2025

  1. use domain instead of ip

    guguducken committed Feb 27, 2025
    Copy the full SHA
    ac5e414 View commit details
  2. fic

    guguducken committed Feb 27, 2025
    Copy the full SHA
    c103528 View commit details
Showing with 3,667 additions and 2,358 deletions.
  1. +2 −0 .gitignore
  2. +2 −1 action.yml
  3. +3,623 −2,311 dist/index.js
  4. +40 −46 src/setup-hdfs.ts
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,4 @@
node_modules/
lib/
.idea
.vscode
3 changes: 2 additions & 1 deletion action.yml
Original file line number Diff line number Diff line change
@@ -3,9 +3,10 @@ name: 'Setup Apache HDFS'
description: 'Set up Apache Hadoop/HDFS and add the command-line tools to the PATH.'
author: 'beyondstorage'
inputs:
hdfs-version:
hdfs-download-url:
description: "The installed Apache Hadoop version."
required: true
default: "https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz"
runs:
using: 'node16'
main: 'dist/index.js'
5,934 changes: 3,623 additions & 2,311 deletions dist/index.js

Large diffs are not rendered by default.

86 changes: 40 additions & 46 deletions src/setup-hdfs.ts
Original file line number Diff line number Diff line change
@@ -1,25 +1,25 @@
import * as core from '@actions/core';
import {downloadTool, extractTar, cacheDir} from '@actions/tool-cache';
import {exec} from 'child_process';
import util from 'node:util';
import child_process from 'node:child_process';
import * as fs from 'fs';
import {promisify} from 'util';

const writeFile = promisify(fs.writeFile);
const exec = util.promisify(child_process.exec);

async function setup() {
// Fetch user input.
const hdfsVersion = core.getInput('hdfs-version');

const hdfsUrl = `https://dlcdn.apache.org/hadoop/common/hadoop-${hdfsVersion}/hadoop-${hdfsVersion}.tar.gz`;
const hdfsUrl = core.getInput('hdfs-download-url');

// Download hdfs and extract.
const hdfsTar = await downloadTool(hdfsUrl);
const hdfsFolder = (await extractTar(hdfsTar)) + `/hadoop-${hdfsVersion}`;
const hdfsFolder = (await extractTar(hdfsTar)) + `/hadoop-3.4.1`;

const coreSite = `<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:9000</value>
<value>hdfs://localhost:9820</value>
</property>
<property>
<name>hadoop.http.staticuser.user</name>
@@ -48,56 +48,50 @@ async function setup() {
</configuration>`;
await writeFile(`${hdfsFolder}/etc/hadoop/hdfs-site.xml`, hdfsSite);

const hdfsHome = await cacheDir(hdfsFolder, 'hdfs', hdfsVersion);
const hdfsHome = await cacheDir(hdfsFolder, 'hdfs', '3.4.1');

// Setup self ssh connection.
// Fix permission issues: https://github.community/t/ssh-test-using-github-action/166717/12
const cmd = `chmod g-w $HOME &&
chmod o-w $HOME &&
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa &&
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys &&
chmod 0600 ~/.ssh/authorized_keys &&
ssh-keyscan -H localhost >> ~/.ssh/known_hosts &&
chmod 0600 ~/.ssh/known_hosts &&
eval \`ssh-agent\` &&
ssh-add ~/.ssh/id_rsa
const cmd = `set -ex;
chmod g-w $HOME;
chmod o-w $HOME;
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa;
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys;
chmod 0600 ~/.ssh/authorized_keys;
ssh-keyscan -H localhost >> ~/.ssh/known_hosts;
chmod 0600 ~/.ssh/known_hosts;
eval \`ssh-agent\`;
ssh-add ~/.ssh/id_rsa;
`;
exec(cmd, (err: any, stdout: any, stderr: any) => {
core.info(stdout);
core.warning(stderr);
if (err) {
core.error('Setup self ssh failed');
throw new Error(err);
}
});
let result = await exec(cmd);
core.info(result.stdout);
core.warning(result.stderr);

core.info('Setup self ssh success');

// Start hdfs daemon.
exec(
`${hdfsHome}/bin/hdfs namenode -format`,
(err: any, stdout: any, stderr: any) => {
core.info(stdout);
core.warning(stderr);
if (err) {
core.error('Format hdfs namenode failed');
throw new Error(err);
}
}
);
result = await exec(`${hdfsHome}/bin/hdfs namenode -format`);
core.info(result.stdout);
core.warning(result.stderr);
core.info('Format hdfs namenode success');

exec(
`${hdfsHome}/sbin/start-dfs.sh`,
(err: any, stdout: any, stderr: any) => {
core.info(stdout);
core.warning(stderr);
if (err) {
core.error('Call start-dfs failed');
throw new Error(err);
}
}
result = await exec(`${hdfsHome}/sbin/start-dfs.sh`);
core.info(result.stdout);
core.warning(result.stderr);
core.info('Start hdfs success');

result = await exec(
`sudo bash -c 'echo "127.0.0.1 hadoop-namenode.hadoop.svc.cluster.local" >> /etc/hosts;'`
);
core.info(result.stdout);
core.warning(result.stderr);
core.info('Set hdfs name node domain success');

core.addPath(`${hdfsHome}/bin`);
core.exportVariable('HDFS_NAMENODE_ADDR', '127.0.0.1:9000');
core.exportVariable(
'HDFS_NAMENODE_ADDR',
'hadoop-namenode.hadoop.svc.cluster.local:9820'
);
core.exportVariable('HDFS_NAMENODE_HTTP_ADDR', '127.0.0.1:9870');
core.exportVariable('HADOOP_HOME', hdfsHome);
}