scratch-blocks本地脱机编译教程,全平台通用

江奕
2023-12-01

对于搞scratch-blocks二次开发的人来说,光是编译这一步就挡住了大部分开发者(Mac除外)。

官方默认的编译方式是远程调用google-closure-compiler在线压缩工具,由于众所周知的原因,国内调用很不稳定,即使用代理,因为数据流较大,也很难成功编译。

官方还提供了本地编译的方法,但是也很容易出问题。

魔改了官方的编译脚本,使用google-closure-compiler.jar本地编译,可以很快速地完成本地编译。

二次开发版已上传至Github,下载后可直接编译。
https://github.com/open-scratch/scratch-blocks

准备工作:

本教程使用win10的ubuntu wsl系统

  • 安装nodejs

  • 安装jdk

  • 安装python2
    一般Linux自带,无需安装

没错,三个环境

外人绝对无法理解为什么编译一个JavaScript包要用npm通过python来调用java

下载google-closure-compiler.jar(注意文件名必须是compiler,脚本中按此名称调用)

放到根目录

魔改官方编译脚本

import sys
# Guard: this script relies on Python-2-only constructs (print statements,
# httplib, reload, dict.has_key, list-returning filter), so bail out early
# with a clear message under Python 3.
if sys.version_info[0] != 2:
  raise Exception("Blockly build only compatible with Python 2.x.\n"
                  "You are using: " + sys.version)

import errno, glob, httplib, json, os, re, subprocess, threading, urllib

# Legacy layout: Closure checked out as a sibling directory of this repo.
# (Kept for reference; __main__ below selects the npm layout.)
CLOSURE_DIR = os.path.pardir
CLOSURE_ROOT = os.path.pardir
CLOSURE_LIBRARY = "closure-library"

# npm layout: Closure library/compiler installed under node_modules.
CLOSURE_DIR_NPM = "node_modules"
CLOSURE_ROOT_NPM = os.path.join("node_modules")  # single-arg join, i.e. just "node_modules"
CLOSURE_LIBRARY_NPM = "google-closure-library"
CLOSURE_COMPILER_NPM = "google-closure-compiler"

def import_path(fullpath):
  """Dynamically import and return the module located at *fullpath*.

  Temporarily appends the module's directory to sys.path, imports it by
  bare name, forces a reload in case a stale copy was already imported,
  then removes the path entry again.
  """
  directory, leafname = os.path.split(fullpath)
  modname = os.path.splitext(leafname)[0]
  sys.path.append(directory)
  module = __import__(modname)
  reload(module)  # Source may have changed since a previous import.
  del sys.path[-1]
  return module

def read(filename):
    """Return the entire contents of *filename* as a single string.

    Fix: use a context manager so the file handle is closed even if the
    read raises, and f.read() instead of joining readlines() (identical
    result, one call).
    """
    with open(filename) as f:
        return f.read()

# Boilerplate prepended to every generated file: a provenance note plus
# JavaScript strict mode.
HEADER = ("// Do not edit this file; automatically generated by build.py.\n"
          "'use strict';\n")

class Gen_uncompressed(threading.Thread):
  """Generate a JavaScript file that loads Blockly's raw files.
  Runs in a separate thread.
  """
  def __init__(self, search_paths, vertical, closure_env):
    # search_paths: files/directories scanned by calcdeps for goog deps.
    # vertical: True writes the vertical variant, False the horizontal one.
    # closure_env: mapping substituted into the JS templates by format_js
    #   ({closure_dir}, {closure_library}, ...).
    threading.Thread.__init__(self)
    self.search_paths = search_paths
    self.vertical = vertical
    self.closure_env = closure_env

  def run(self):
    """Write blockly_uncompressed_<orientation>.js, a bootstrap loader."""
    if self.vertical:
      target_filename = 'blockly_uncompressed_vertical.js'
    else:
      target_filename = 'blockly_uncompressed_horizontal.js'
    f = open(target_filename, 'w')
    f.write(HEADER)
    # Prologue: detect Node vs browser and locate Blockly's directory at
    # runtime; format_js fills in {closure_library}.
    f.write(self.format_js("""
var isNodeJS = !!(typeof module !== 'undefined' && module.exports &&
                  typeof window === 'undefined');

if (isNodeJS) {
  var window = {};
  require('{closure_library}');
}

window.BLOCKLY_DIR = (function() {
  if (!isNodeJS) {
    // Find name of current directory.
    var scripts = document.getElementsByTagName('script');
    var re = new RegExp('(.+)[\/]blockly_uncompressed(_vertical|_horizontal|)\.js$');
    for (var i = 0, script; script = scripts[i]; i++) {
      var match = re.exec(script.src);
      if (match) {
        return match[1];
      }
    }
    alert('Could not detect Blockly\\'s directory name.');
  }
  return '';
})();

window.BLOCKLY_BOOT = function() {
  var dir = '';
  if (isNodeJS) {
    require('{closure_library}');
    dir = 'blockly';
  } else {
    // Execute after Closure has loaded.
    if (!window.goog) {
      alert('Error: Closure not found.  Read this:\\n' +
            'developers.google.com/blockly/guides/modify/web/closure');
    }
    if (window.BLOCKLY_DIR.search(/node_modules/)) {
      dir = '..';
    } else {
      dir = window.BLOCKLY_DIR.match(/[^\\/]+$/)[0];
    }
  }
"""))
    # Emit one goog.addDependency line per source file, sorted so the
    # output is deterministic.
    add_dependency = []
    base_path = calcdeps.FindClosureBasePath(self.search_paths)
    for dep in calcdeps.BuildDependenciesFromFiles(self.search_paths):
      add_dependency.append(calcdeps.GetDepsLine(dep, base_path))
    add_dependency.sort()  # Deterministic build.
    add_dependency = '\n'.join(add_dependency)
    # Find the Blockly directory name and replace it with a JS variable.
    # This allows blockly_uncompressed.js to be compiled on one computer and be
    # used on another, even if the directory name differs.
    m = re.search('[\\/]([^\\/]+)[\\/]core[\\/]blockly.js', add_dependency)
    add_dependency = re.sub('([\\/])' + re.escape(m.group(1)) +
        '([\\/]core[\\/])', '\\1" + dir + "\\2', add_dependency)
    f.write(add_dependency + '\n')

    # goog.require every namespace that Blockly itself provides (skip
    # anything living under the Closure library root).
    provides = []
    for dep in calcdeps.BuildDependenciesFromFiles(self.search_paths):
      # starts with '../' or 'node_modules/'
      if not dep.filename.startswith(self.closure_env["closure_root"] + os.sep):
        provides.extend(dep.provides)
    provides.sort()  # Deterministic build.
    f.write('\n')
    f.write('// Load Blockly.\n')
    for provide in provides:
      f.write("goog.require('%s');\n" % provide)

    # Epilogue: boot immediately under Node, or inject script tags in a
    # browser so Closure loads before BLOCKLY_BOOT runs.
    f.write(self.format_js("""
delete this.BLOCKLY_DIR;
delete this.BLOCKLY_BOOT;
};

if (isNodeJS) {
  window.BLOCKLY_BOOT();
  module.exports = Blockly;
} else {
  // Delete any existing Closure (e.g. Soy's nogoog_shim).
  document.write('<script>var goog = undefined;</script>');
  // Load fresh Closure Library.
  document.write('<script src="' + window.BLOCKLY_DIR +
      '/{closure_dir}/{closure_library}/closure/goog/base.js"></script>');
  document.write('<script>window.BLOCKLY_BOOT();</script>');
}
"""))
    f.close()
    print("SUCCESS: " + target_filename)

  def format_js(self, code):
    """str.format *code* with self.closure_env while leaving all other
    literal braces (JS syntax) untouched.

    Every '{' not followed by a whitelisted key and every '}' not closing
    one is doubled (escaped) before .format() runs.
    """
    key_whitelist = self.closure_env.keys()
    keys_pipe_separated = reduce(lambda accum, key: accum + "|" + key, key_whitelist)
    begin_brace = re.compile(r"\{(?!%s)" % (keys_pipe_separated,))
    end_brace = re.compile(r"\}")
    def end_replacement(match):
      # Look back to the nearest '{': if the text between it and this '}'
      # is a whitelisted key, keep the brace single (real placeholder);
      # otherwise double it (literal JS brace).
      try:
        maybe_key = match.string[match.string[:match.start()].rindex("{") + 1:match.start()]
      except ValueError:
        return "}}"
      if maybe_key and maybe_key in key_whitelist:
        return "}"
      else:
        return "}}"

    return begin_brace.sub("{{", end_brace.sub(end_replacement, code)).format(**self.closure_env)

class Gen_compressed(threading.Thread):
  """Compress Blockly core and block files using the local
  google-closure-compiler.jar.  Runs in a separate thread.
  """
  def __init__(self, search_paths_vertical, search_paths_horizontal, closure_env):
    threading.Thread.__init__(self)
    self.search_paths_vertical = search_paths_vertical
    self.search_paths_horizontal = search_paths_horizontal
    self.closure_env = closure_env

  def run(self):
    # Compile both core orientations, then the three block bundles.
    self.gen_core(True)
    self.gen_core(False)
    self.gen_blocks("horizontal")
    self.gen_blocks("vertical")
    self.gen_blocks("common")

  def gen_core(self, vertical):
    """Compress the Blockly core for one orientation."""
    if vertical:
      target_filename = 'blockly_compressed_vertical.js'
      search_paths = self.search_paths_vertical
    else:
      target_filename = 'blockly_compressed_horizontal.js'
      search_paths = self.search_paths_horizontal
    # Define the parameters for the POST request.
    params = []

    # Read in all the source files.
    filenames = calcdeps.CalculateDependencies(search_paths,
      [os.path.join("core", "blockly.js")])
    filenames.sort()  # Deterministic build.
    for filename in filenames:
      # Append filenames as false arguments the step before compiling will
      # either transform them into arguments for local or remote compilation
      params.append(("js_file", filename))

    self.do_compile(params, target_filename, filenames, "")

  def gen_blocks(self, block_type):
    """Compress one block bundle; block_type is "horizontal", "vertical"
    or "common".
    """
    if block_type == "horizontal":
      target_filename = "blocks_compressed_horizontal.js"
      filenames = glob.glob(os.path.join("blocks_horizontal", "*.js"))
    elif block_type == "vertical":
      target_filename = "blocks_compressed_vertical.js"
      filenames = glob.glob(os.path.join("blocks_vertical", "*.js"))
    elif block_type == "common":
      target_filename = "blocks_compressed.js"
      filenames = glob.glob(os.path.join("blocks_common", "*.js"))

    # glob.glob ordering is platform-dependent and not necessary deterministic
    filenames.sort()  # Deterministic build.

    # Define the parameters for the POST request.
    params = []

    # Read in all the source files.
    # Add Blockly.Blocks to be compatible with the compiler.
    params.append(("js_file", os.path.join("build", "gen_blocks.js")))
    # Add Blockly.Colours for use of centralized colour bank
    filenames.append(os.path.join("core", "colours.js"))
    filenames.append(os.path.join("core", "constants.js"))

    for filename in filenames:
      # Append filenames as false arguments the step before compiling will
      # either transform them into arguments for local or remote compilation
      params.append(("js_file", filename))

    # Remove Blockly.Blocks to be compatible with Blockly.
    remove = "var Blockly={Blocks:{}};"
    self.do_compile(params, target_filename, filenames, remove)

  def do_compile(self, params, target_filename, filenames, remove):
    """Compile locally, then report errors, write output and print stats."""
    json_data = self.do_compile_jar(params, target_filename)
    if self.report_errors(target_filename, filenames, json_data):
      self.write_output(target_filename, remove, json_data)
      self.report_stats(target_filename, json_data)


  def do_compile_jar(self, params, target_filename):
    """Run google-closure-compiler.jar on the collected js_file params.

    Returns a dict shaped like the online compiler's JSON response
    (compiledCode + statistics) so the downstream reporting code from the
    original remote build is reused unchanged.
    """
    dash_params = ["--compilation_level SIMPLE", "--language_in ECMASCRIPT_2017", "--language_out ECMASCRIPT5", "--define='goog.DEBUG=false'", "--rewrite_polyfills=false"]
    for (arg, value) in params:
      if arg == "js_file":
        dash_params.append("--js='" + value + "'")
    args = []
    for group in [["java -jar google-closure-compiler.jar"], dash_params]:
      args.extend(group)
    # print args
    # NOTE(review): on darwin the whole joined command string is passed as a
    # single program name without shell=True, which Popen cannot exec —
    # this branch looks broken; verify on macOS.
    if sys.platform == "darwin":
      proc = subprocess.Popen(" ".join(args), stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    else:
      proc = subprocess.Popen(" ".join(args), stdin=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)
    (stdout, stderr) = proc.communicate()

    # stderr is always None here: the Popen call above does not pipe stderr.
    print stdout
    print stderr
    # Build the JSON response.
    filesizes = [os.path.getsize(value) for (arg, value) in params if arg == "js_file"]
    return dict(
      compiledCode=stdout,
      statistics=dict(
        originalSize=reduce(lambda v, size: v + size, filesizes, 0),
        compressedSize=len(stdout),
      )
    )


  def report_errors(self, target_filename, filenames, json_data):
    """Print compiler diagnostics; return True when output may be written.

    With the local jar, json_data (built in do_compile_jar) never contains
    "serverErrors"/"errors"/"warnings", so this always takes the final
    branch and returns True.
    """
    def file_lookup(name):
      # Map the compiler's "Input_N" labels (1-based) back to filenames.
      if not name.startswith("Input_"):
        return "???"
      n = int(name[6:]) - 1
      return filenames[n]

    if json_data.has_key("serverErrors"):
      errors = json_data["serverErrors"]
      for error in errors:
        print("SERVER ERROR: %s" % target_filename)
        print(error["error"])
    elif json_data.has_key("errors"):
      errors = json_data["errors"]
      for error in errors:
        print("FATAL ERROR")
        print(error["error"])
        if error["file"]:
          print("%s at line %d:" % (
              file_lookup(error["file"]), error["lineno"]))
          print(error["line"])
          print((" " * error["charno"]) + "^")
        sys.exit(1)
    else:
      if json_data.has_key("warnings"):
        warnings = json_data["warnings"]
        for warning in warnings:
          print("WARNING")
          print(warning["warning"])
          if warning["file"]:
            print("%s at line %d:" % (
                file_lookup(warning["file"]), warning["lineno"]))
            print(warning["line"])
            print((" " * warning["charno"]) + "^")
        print()

      return True

    return False

  def write_output(self, target_filename, remove, json_data):
      """Write compiled code to target_filename, stripping the `remove`
      shim and Google's Apache license banners; skip writing when either
      size statistic is zero (compile failed).
      """
      if not json_data.has_key("compiledCode"):
        print("FATAL ERROR: Compiler did not return compiledCode.")
        sys.exit(1)

      code = HEADER + "\n" + json_data["compiledCode"]
      code = code.replace(remove, "")

      # Trim down Google's (and only Google's) Apache licences.
      # The Closure Compiler preserves these.
      LICENSE = re.compile("""/\\*

 [\w ]+

 Copyright \\d+ Google Inc.
 https://developers.google.com/blockly/

 Licensed under the Apache License, Version 2.0 \(the "License"\);
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
\\*/""")
      code = re.sub(LICENSE, "", code)

      stats = json_data["statistics"]
      original_b = stats["originalSize"]
      compressed_b = stats["compressedSize"]
      if original_b > 0 and compressed_b > 0:
        f = open(target_filename, "w")
        f.write(code)
        f.close()

  def report_stats(self, target_filename, json_data):
      """Print before/after sizes in KB and the compression ratio."""
      stats = json_data["statistics"]
      original_b = stats["originalSize"]
      compressed_b = stats["compressedSize"]
      if original_b > 0 and compressed_b > 0:
        original_kb = int(original_b / 1024 + 0.5)
        compressed_kb = int(compressed_b / 1024 + 0.5)
        ratio = int(float(compressed_b) / float(original_b) * 100 + 0.5)
        print("SUCCESS: " + target_filename)
        print("Size changed from %d KB to %d KB (%d%%)." % (
            original_kb, compressed_kb, ratio))
      else:
        print("UNKNOWN ERROR")

class Gen_langfiles(threading.Thread):
  """Generate JavaScript file for each natural language supported.

  Runs in a separate thread.
  """

  def __init__(self):
    threading.Thread.__init__(self)

  def _rebuild(self, srcs, dests):
    """Return True if any file in *srcs* is newer than any file in *dests*.

    A missing destination file forces a rebuild (True); a missing source
    file aborts the build.  Any other stat error is reported and falls
    through returning None (falsy: no rebuild).
    """
    try:
      return (max(os.path.getmtime(src) for src in srcs) >
              min(os.path.getmtime(dest) for dest in dests))
    except OSError as e:
      # Was a file not found?
      if e.errno == errno.ENOENT:
        # If it was a source file, we can't proceed.
        if e.filename in srcs:
          print("Source file missing: " + e.filename)
          sys.exit(1)
        else:
          # If a destination file was missing, rebuild.
          return True
      else:
        # Fix: the original concatenated the OSError object to a str,
        # which itself raises TypeError; stringify it first.
        print("Error checking file creation times: " + str(e))

  def run(self):
    # The files msg/json/{en,qqq,synonyms}.json depend on msg/messages.js.
    if self._rebuild([os.path.join("msg", "messages.js")],
                     [os.path.join("msg", "json", f) for f in
                      ["en.json", "qqq.json", "synonyms.json"]]):
      try:
        subprocess.check_call([
            "python",
            os.path.join("i18n", "js_to_json.py"),
            "--input_file", "msg/messages.js",
            "--output_dir", "msg/json/",
            "--quiet"])
      except (subprocess.CalledProcessError, OSError) as e:
        # Documentation for subprocess.check_call says that CalledProcessError
        # will be raised on failure, but I found that OSError is also possible.
        # Fix: build one string instead of printing a tuple under Python 2.
        print("Error running i18n/js_to_json.py: " + str(e))
        sys.exit(1)

    try:
      # Use create_messages.py to create .js files from .json files.
      cmd = [
          "python",
          os.path.join("i18n", "create_messages.py"),
          "--source_lang_file", os.path.join("msg", "json", "en.json"),
          "--source_synonym_file", os.path.join("msg", "json", "synonyms.json"),
          "--source_constants_file", os.path.join("msg", "json", "constants.json"),
          "--key_file", os.path.join("msg", "json", "keys.json"),
          "--output_dir", os.path.join("msg", "js"),
          "--quiet"]
      json_files = glob.glob(os.path.join("msg", "json", "*.json"))
      # Skip the metadata files; everything else is a per-language file.
      json_files = [file for file in json_files if not
                    (file.endswith(("keys.json", "synonyms.json", "qqq.json", "constants.json")))]
      cmd.extend(json_files)
      subprocess.check_call(cmd)
    except (subprocess.CalledProcessError, OSError) as e:
      # Fix: same tuple-print issue as above; stringify the exception.
      print("Error running i18n/create_messages.py: " + str(e))
      sys.exit(1)

    # Output list of .js files created.
    for f in json_files:
      # This assumes the path to the current directory does not contain "json".
      f = f.replace("json", "js")
      if os.path.isfile(f):
        print("SUCCESS: " + f)
      else:
        print("FAILED to create " + f)

def exclude_vertical(item):
  """Filter predicate: True for every path except the vertical-only
  SVG renderer source file."""
  is_vertical_renderer = item.endswith("block_render_svg_vertical.js")
  return not is_vertical_renderer

def exclude_horizontal(item):
  """Filter predicate: True for every path except the horizontal-only
  SVG renderer source file."""
  is_horizontal_renderer = item.endswith("block_render_svg_horizontal.js")
  return not is_horizontal_renderer

if __name__ == "__main__":
  # Always use the npm-installed Closure layout (node_modules/...).
  closure_dir = CLOSURE_DIR_NPM
  closure_root = CLOSURE_ROOT_NPM
  closure_library = CLOSURE_LIBRARY_NPM
  closure_compiler = CLOSURE_COMPILER_NPM

  # Load calcdeps from the local library.  It becomes a module-level global
  # used by the Gen_uncompressed / Gen_compressed threads above.
  calcdeps = import_path(os.path.join(
      closure_root, closure_library, "closure", "bin", "calcdeps.py"))

  print("Using local compiler: google-closure-compiler.jar ...\n")


  # Expand core/ and the Closure library into explicit source-file lists.
  search_paths = calcdeps.ExpandDirectories(
      ["core", os.path.join(closure_root, closure_library)])

  # Python 2 filter() returns a list, so these can be iterated repeatedly
  # by the threads below.
  search_paths_horizontal = filter(exclude_vertical, search_paths)
  search_paths_vertical = filter(exclude_horizontal, search_paths)

  # Substitution values for the JS templates in Gen_uncompressed.format_js.
  closure_env = {
    "closure_dir": closure_dir,
    "closure_root": closure_root,
    "closure_library": closure_library,
    "closure_compiler": closure_compiler,
  }

  # Run all tasks in parallel threads.
  # Uncompressed is limited by processor speed.
  # Compressed runs the local compiler jar (no network involved).
  # Vertical:
  Gen_uncompressed(search_paths_vertical, True, closure_env).start()
  # Horizontal:
  Gen_uncompressed(search_paths_horizontal, False, closure_env).start()

  # Compressed forms of vertical and horizontal.
  Gen_compressed(search_paths_vertical, search_paths_horizontal, closure_env).start()

  # This is run locally in a separate thread.
  # Gen_langfiles().start()

编译

sudo npm install

npm run prepublish
 类似资料: