How to use io.Pipe() when chunking a large file with erasure coding via the reedsolomon library


Using https://github.com/klauspost/reedsolomon to erasure-code a large file into smaller chunks, I want to use io.Pipe() to create a chain of connected pipes: for example, one pipe that chunks the file and another that calls func Upload to upload each chunk.
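As far as I understand it, the basic io.Pipe() pattern looks roughly like the sketch below: a goroutine writes into one end of the pipe while an HTTP request reads from the other, so the file is never buffered in memory. The streamFile helper, the "file" form field and the endpoint are placeholders of mine, not part of my real code.

import (
    "io"
    "mime/multipart"
    "net/http"
    "os"
    "path/filepath"
)

// streamFile is a hypothetical helper: it streams a single file to the
// server as a multipart form. A goroutine writes the form into the pipe
// while http.Post reads the other end as the request body.
func streamFile(path, targetURL string) error {
    pr, pw := io.Pipe()
    mw := multipart.NewWriter(pw)

    go func() {
        // Producer side: build the multipart body into the write end.
        f, err := os.Open(path)
        if err != nil {
            pw.CloseWithError(err)
            return
        }
        defer f.Close()

        part, err := mw.CreateFormFile("file", filepath.Base(path))
        if err != nil {
            pw.CloseWithError(err)
            return
        }
        if _, err := io.Copy(part, f); err != nil {
            pw.CloseWithError(err)
            return
        }
        // Close the multipart writer first (it writes the final boundary),
        // then signal EOF to the reader.
        pw.CloseWithError(mw.Close())
    }()

    // Consumer side: the read end of the pipe is the request body.
    resp, err := http.Post(targetURL, mw.FormDataContentType(), pr)
    if err != nil {
        return err
    }
    defer resp.Body.Close()
    return nil
}

Here is my current, file-based Chunking implementation that I would like to convert to that kind of streaming: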

func (rd *ReedSolomon) Chunking() {
    fname := rd.File

    // Create encoding matrix.
    enc, err := reedsolomon.NewStream(rd.Data, rd.Par)
    checkErr(err)

    fmt.Println("Opening", fname)
    f, err := os.Open(fname)
    checkErr(err)

    instat, err := f.Stat()
    checkErr(err)

    shards := rd.Data + rd.Par
    out := make([]*os.File, shards)

    // Create the resulting files.
    dir, file := filepath.Split(fname)
    if rd.OutDir != "" {
        dir = rd.OutDir
    }

    for i := range out {
        outfn := fmt.Sprintf("%s.%d", file, i)
        out[i], err = os.Create(filepath.Join(dir, outfn))
        checkErr(err)
    }

    // Split into files.
    data := make([]io.Writer, rd.Data)
    for i := range data {
        data[i] = out[i]
    }

    // Do the split
    err = enc.Split(f, data, instat.Size())
    checkErr(err)

    // Close the data shard files and re-open them as readers,
    // so the parity shards can be computed from them.
    input := make([]io.Reader, rd.Data)
    targetURL := "http://localhost:8080"

    for i := range input {
        out[i].Close()
        f, err := os.Open(out[i].Name())
        checkErr(err)
        input[i] = f
        defer f.Close()
    }

    // Create parity output writers
    parity := make([]io.Writer, rd.Par)
    for i := range parity {
        parity[i] = out[rd.Data+i]
        defer out[rd.Data+i].Close()
    }

    // Compute the parity shards from the data shards.
    err = enc.Encode(input, parity)
    checkErr(err)

    // Upload every shard file (data and parity).
    for _, sd := range out {
        postFile(sd.Name(), targetURL)
    }

}

I am pretty new to using io.Pipe().
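What I think I am after is something like the sketch below, where enc.Split writes each data shard straight into the write end of an io.Pipe and one goroutine per shard streams the read end to the server. splitAndUpload and the /upload?shard= endpoint are invented placeholders of mine, and parity is left out because it needs a second pass over the data shards (for example by also teeing each shard to a file, as in my current code).

import (
    "fmt"
    "io"
    "net/http"
    "os"
    "sync"

    "github.com/klauspost/reedsolomon"
)

// splitAndUpload is a hypothetical, data-shards-only variant of Chunking:
// enc.Split writes each shard into an io.Pipe instead of a file, and a
// goroutine per shard uploads the read end while it is being written.
func splitAndUpload(fname, targetURL string, dataShards, parityShards int) error {
    enc, err := reedsolomon.NewStream(dataShards, parityShards)
    if err != nil {
        return err
    }

    f, err := os.Open(fname)
    if err != nil {
        return err
    }
    defer f.Close()

    instat, err := f.Stat()
    if err != nil {
        return err
    }

    // One pipe per data shard: Split is the producer, the goroutines
    // below are the consumers.
    writers := make([]io.Writer, dataShards)
    pipes := make([]*io.PipeWriter, dataShards)
    var wg sync.WaitGroup

    for i := 0; i < dataShards; i++ {
        pr, pw := io.Pipe()
        writers[i], pipes[i] = pw, pw

        wg.Add(1)
        go func(i int, pr *io.PipeReader) {
            defer wg.Done()
            // Hypothetical endpoint; the read end of the pipe is the body.
            url := fmt.Sprintf("%s/upload?shard=%d", targetURL, i)
            resp, err := http.Post(url, "application/octet-stream", pr)
            if err != nil {
                // Unblock Split if this upload fails.
                pr.CloseWithError(err)
                return
            }
            resp.Body.Close()
        }(i, pr)
    }

    // Split writes shard 0, then shard 1, ... into the pipe writers while
    // the uploaders drain them concurrently.
    err = enc.Split(f, writers, instat.Size())

    // Closing the write ends delivers EOF (or the split error) to the
    // uploaders so their requests can complete.
    for _, pw := range pipes {
        pw.CloseWithError(err)
    }
    wg.Wait()
    return err
}

Is this roughly the right way to wire io.Pipe() into the reedsolomon streaming API, and how would the parity shards best fit into such a pipeline?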
