question-mark
Stuck on an issue?

Lightrun Answers was designed to reduce the constant googling that comes with debugging 3rd party libraries. It collects links to all the places you might be looking at while hunting down a tough bug.

And, if you’re still stuck at the end, we’re happy to hop on a call to see how we can help out.

Persistent storage with google cloud

See original GitHub issue

Eh, I think it was obvious I couldn’t deal with that myself either, sowwy.

After dropbox I tried giving this https://github.com/VeliovGroup/Meteor-Files/wiki/Google-Cloud-Storage-Integration a try. How I went:

  • Installed all the packages mentioned in the guide
  • Did all the google cloud configurations (downloaded json file etc.)
  • then took the code from the guide and inserted my stuff:
import { Meteor } from 'meteor/meteor';
import { Random } from 'meteor/random'
import { FilesCollection } from 'meteor/ostrio:files';

let gcloud, gcs, bucket, bucketMetadata, Request, bound, Collections = {};

if (Meteor.isServer) {
  // use require() as "'import' and 'export' may only appear at the top level"
  const Random = require('meteor/random');
  const Storage = require('@google-cloud/storage');
  gcs = new Storage('google-cloud')({
    projectId: 'my-project-123', // <-- Replace this with your project ID
    keyFilename: 'imports\api\bucketkey.json'  // <-- Replace this with the path to your key.json
  });
  bucket = gcs.bucket('my_bucket'); // <-- Replace this with your bucket name
  bucket.getMetadata(function(error, metadata, apiResponse) {
    if (error) {
      console.error(error);
    }
  });
  Request = Npm.require('request');
  bound = Meteor.bindEnvironment(function(callback) {
    return callback();
  });
}

Collections.files = new FilesCollection({
  debug: false, // Set to true to enable debugging messages
  storagePath: 'assets/app/uploads/uploadedFiles',
  collectionName: 'Images',
  allowClientCode: false,
  onAfterUpload(fileRef) {
    // In the onAfterUpload callback, we will move the file to Google Cloud Storage
    _.each(fileRef.versions, (vRef, version) => {
      // We use Random.id() instead of real file's _id
      // to secure files from reverse engineering
      // As after viewing this code it will be easy
      // to get access to unlisted and protected files
      const filePath = 'files/' + (Random.id()) + '-' + version + '.' + fileRef.extension;
      // Here we set the neccesary options to upload the file, for more options, see
      // https://googlecloudplatform.github.io/gcloud-node/#/docs/v0.36.0/storage/bucket?method=upload
      const options = {
        destination: filePath,
        resumable: true
      };

      bucket.upload(fileRef.path, options, (error, file) => {
        bound(() => {
          let upd;
          if (error) {
            console.error(error);
          } else {
            upd = {
              $set: {}
            };
            upd['$set'][`versions.${version}.meta.pipePath`] = filePath;
            this.collection.update({
              _id: fileRef._id
            }, upd, (updError) => {
              if (updError) {
                console.error(updError);
              } else {
                // Unlink original files from FS
                // after successful upload to Google Cloud Storage
                this.unlink(this.collection.findOne(fileRef._id), version);
              }
            });
          }
        });
      });
    });
  },
  interceptDownload(http, fileRef, version) {
    let ref, ref1, ref2;
    const path = (ref= fileRef.versions) != null ? (ref1 = ref[version]) != null ? (ref2 = ref1.meta) != null ? ref2.pipePath : void 0 : void 0 : void 0;
    const vRef = ref1;
    if (path) {
      // If file is moved to Google Cloud Storage
      // We will pipe request to Google Cloud Storage
      // So, original link will stay always secure
      const remoteReadStream = getReadableStream(http, path, vRef);
      this.serve(http, fileRef, vRef, version, remoteReadStream);
      return true;
    }
    // While the file has not been uploaded to Google Cloud Storage, we will serve it from the filesystem
    return false;
  }
});

if (Meteor.isServer) {
  // Intercept file's collection remove method to remove file from Google Cloud Storage
  const _origRemove = Collections.files.remove;

  Collections.files.remove = function(search) {
    const cursor = this.collection.find(search);
    cursor.forEach((fileRef) => {
      _.each(fileRef.versions, (vRef) => {
        let ref;
        if (vRef != null ? (ref = vRef.meta) != null ? ref.pipePath : void 0 : void 0) {
          bucket.file(vRef.meta.pipePath).delete((error) => {
            bound(() => {
              if (error) {
                console.error(error);
              }
            });
          });
        }
      });
    });
    // Call the original removal method
    _origRemove.call(this, search);
  };
}

function getReadableStream(http, path, vRef){
  let array, end, partial, remoteReadStream, reqRange, responseType, start, take;

  if (http.request.headers.range) {
    partial = true;
    array = http.request.headers.range.split(/bytes=([0-9]*)-([0-9]*)/);
    start = parseInt(array[1]);
    end = parseInt(array[2]);
    if (isNaN(end)) {
      end = vRef.size - 1;
    }
    take = end - start;
  } else {
    start = 0;
    end = vRef.size - 1;
    take = vRef.size;
  }

  if (partial || (http.params.query.play && http.params.query.play === 'true')) {
    reqRange = {
      start: start,
      end: end
    };
    if (isNaN(start) && !isNaN(end)) {
      reqRange.start = end - take;
      reqRange.end = end;
    }
    if (!isNaN(start) && isNaN(end)) {
      reqRange.start = start;
      reqRange.end = start + take;
    }
    if ((start + take) >= vRef.size) {
      reqRange.end = vRef.size - 1;
    }
    if ((reqRange.start >= (vRef.size - 1) || reqRange.end > (vRef.size - 1))) {
      responseType = '416';
    } else {
      responseType = '206';
    }
  } else {
    responseType = '200';
  }

  if (responseType === '206') {
    remoteReadStream = bucket.file(path).createReadStream({
      start: reqRange.start,
      end: reqRange.end
    });
  } else if (responseType === '200') {
    remoteReadStream = bucket.file(path).createReadStream();
  }

  return remoteReadStream;
}

This error occurs the moment I save the images.js file with that code:

W20190907-01:17:32.025(2)? (STDERR) C:\Users\NERV\AppData\Local\.meteor\packages\meteor-tool\1.8.1\mt-os.windows.x86_64\dev_bundle\server-lib\node_modules\fibers\future.js:280
W20190907-01:17:32.027(2)? (STDERR)                                             throw(ex);
W20190907-01:17:32.030(2)? (STDERR)                                             ^
W20190907-01:17:32.030(2)? (STDERR)
W20190907-01:17:32.031(2)? (STDERR) TypeError: Storage is not a constructor
W20190907-01:17:32.032(2)? (STDERR)     at images.js (imports/api/images/images.js:62:9)
W20190907-01:17:32.032(2)? (STDERR)     at fileEvaluate (packages\modules-runtime.js:336:7)
W20190907-01:17:32.033(2)? (STDERR)     at Module.require (packages\modules-runtime.js:238:14)
W20190907-01:17:32.034(2)? (STDERR)     at Module.moduleLink [as link] (C:\Users\NERV\AppData\Local\.meteor\packages\modules\0.13.0\npm\node_modules\reify\lib\runtime\index.js:38:38)
W20190907-01:17:32.035(2)? (STDERR)     at methods.js (imports/api/characters/methods.js:1:463)
W20190907-01:17:32.037(2)? (STDERR)     at fileEvaluate (packages\modules-runtime.js:336:7)
W20190907-01:17:32.042(2)? (STDERR)     at Module.require (packages\modules-runtime.js:238:14)
W20190907-01:17:32.043(2)? (STDERR)     at Module.moduleLink [as link] (C:\Users\NERV\AppData\Local\.meteor\packages\modules\0.13.0\npm\node_modules\reify\lib\runtime\index.js:38:38)
W20190907-01:17:32.044(2)? (STDERR)     at main.js (server/main.js:1:146)
W20190907-01:17:32.045(2)? (STDERR)     at fileEvaluate (packages\modules-runtime.js:336:7)
W20190907-01:17:32.045(2)? (STDERR)     at Module.require (packages\modules-runtime.js:238:14)
W20190907-01:17:32.048(2)? (STDERR)     at require (packages\modules-runtime.js:258:21)
W20190907-01:17:32.049(2)? (STDERR)     at C:\Users\NERV\Desktop\Projekte\DDCFull\.meteor\local\build\programs\server\app\app.js:4682:1
W20190907-01:17:32.049(2)? (STDERR)     at C:\Users\NERV\Desktop\Projekte\DDCFull\.meteor\local\build\programs\server\boot.js:419:36
W20190907-01:17:32.052(2)? (STDERR)     at Array.forEach (<anonymous>)
W20190907-01:17:32.052(2)? (STDERR)     at C:\Users\NERV\Desktop\Projekte\DDCFull\.meteor\local\build\programs\server\boot.js:228:19

Unable to resolve some modules:

  "worker_threads" in /C/Users/NERV/Desktop/Projekte/DDCFull/node_modules/write-file-atomic/index.js (web.browser.legacy)

If you notice problems related to these missing modules, consider running:

  meteor npm install --save worker_threads

=> Exited with code: 1
=> Your application is crashing. Waiting for file change.

I guess it’s about npm install @google-cloud/storage" ? I installed it as it is. I installed it while the cmd was inside my project directory, without writing meteor. Regarding the unresolved moduls “worker_threads” I tried doing what the log told me by doing meteor npm install --save worker_threads but it put out this

C:\Users\NERV\Desktop\Projekte\DDCFull>meteor npm install --save worker_threads
npm ERR! code E404
npm ERR! 404 Not Found - GET https://registry.npmjs.org/worker_threads - Not found
npm ERR! 404
npm ERR! 404  'worker_threads@latest' is not in the npm registry.
npm ERR! 404 You should bug the author to publish it (or use the name yourself!)
npm ERR! 404
npm ERR! 404 Note that you can also install from a
npm ERR! 404 tarball, folder, http url, or git url.

npm ERR! A complete log of this run can be found in:
npm ERR!     C:\Users\NERV\AppData\Roaming\npm-cache\_logs\2019-09-06T23_26_45_570Z-debug.log

Which makes me think this is not what I should do. Thank you for any help.

Issue Analytics

  • State:closed
  • Created 4 years ago
  • Comments:13 (7 by maintainers)

github_iconTop GitHub Comments

1reaction
TomokoOGcommented, Sep 21, 2019

Thank you! But I switched to cloudinary for file-storage and upload, I’m sorry.

0reactions
dr-dimitrucommented, Sep 21, 2019

@TomokoOG no worries, if this tool better suits your needs, it’s the right choice

Read more comments on GitHub >

github_iconTop Results From Across the Web

Persistent Disk: durable block storage - Google Cloud
Persistent Disk is Google's local durable storage service, fully integrated with Google Cloud products, Compute Engine and Google Kubernetes Engine.
Read more >
Google Cloud Storage Options: Object, Block, and File Storage
A Google Cloud Persistent Disk provides block storage and it is used by all virtual machines in Google Cloud (Google Cloud Compute Engine)....
Read more >
Google Cloud Compute Engine Storage Options
Google Cloud Compute Engine Storage has several storage options for the instances including Persistent Disks, Local SSDs, Cloud Storage etc.
Read more >
Google Cloud - Persistent Systems
Persistent is a google cloud partner delivering innovative cloud services which enables fast, flexible and scalable access to the Google cloud.
Read more >
Google Cloud Run - Add Persistent Storage
You can use Google Cloud Storage to store the files such as images, videos, and other static content. We are using the Google...
Read more >

github_iconTop Related Medium Post

No results found

github_iconTop Related StackOverflow Question

No results found

github_iconTroubleshoot Live Code

Lightrun enables developers to add logs, metrics and snapshots to live code - no restarts or redeploys required.
Start Free

github_iconTop Related Reddit Thread

No results found

github_iconTop Related Hackernoon Post

No results found

github_iconTop Related Tweet

No results found

github_iconTop Related Dev.to Post

No results found

github_iconTop Related Hashnode Post

No results found