github.com/jancarloviray/community@v0.41.1-0.20170124221257-33a66c87cf2f/app/public/codemirror/mode/sass/sass.js

// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: http://codemirror.net/LICENSE

(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";

CodeMirror.defineMode("sass", function(config) {
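  // Builds a single regexp that matches any of the given (pre-escaped)
  // patterns at the start of the stream.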
  function tokenRegexp(words) {
    return new RegExp("^" + words.join("|"));
  }

  var keywords = ["true", "false", "null", "auto"];
  var keywordsRegexp = tokenRegexp(keywords);

  var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-",
                   "\\!=", "/", "\\*", "%", "and", "or", "not", ";", "\\{", "\\}", ":"];
  var opRegexp = tokenRegexp(operators);

  var pseudoElementsRegexp = /^::?[a-zA-Z_][\w\-]*/;

  function urlTokens(stream, state) {
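    // Tokenizes the argument of url(...): the parentheses, quoted strings,
    // and unquoted URLs, returning control to tokenBase at the closing ")".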
    var ch = stream.peek();

    if (ch === ")") {
      stream.next();
      state.tokenizer = tokenBase;
      return "operator";
    } else if (ch === "(") {
      stream.next();
      stream.eatSpace();

      return "operator";
    } else if (ch === "'" || ch === '"') {
      state.tokenizer = buildStringTokenizer(stream.next());
      return "string";
    } else {
      state.tokenizer = buildStringTokenizer(")", false);
      return "string";
    }
  }
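  // Returns a tokenizer for // and /* */ comments. Control goes back to
  // tokenBase once a line dedents past `indentation` or, for multi-line
  // comments, once "*/" has been consumed.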
  function comment(indentation, multiLine) {
    return function(stream, state) {
      if (stream.sol() && stream.indentation() <= indentation) {
        state.tokenizer = tokenBase;
        return tokenBase(stream, state);
      }

      if (multiLine && stream.skipTo("*/")) {
        stream.next();
        stream.next();
        state.tokenizer = tokenBase;
      } else {
        stream.skipToEnd();
      }

      return "comment";
    };
  }

  function buildStringTokenizer(quote, greedy) {
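    // Returns a tokenizer for a string terminated by `quote`. When `greedy`
    // is true (the default) the closing quote is consumed with the string;
    // urlTokens passes greedy = false so the closing ")" stays in the stream.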
    if (greedy == null) { greedy = true; }

    function stringTokenizer(stream, state) {
      var nextChar = stream.next();
      var peekChar = stream.peek();
      var previousChar = stream.string.charAt(stream.pos - 2);

      var endingString = ((nextChar !== "\\" && peekChar === quote) || (nextChar === quote && previousChar !== "\\"));

      if (endingString) {
        if (nextChar !== quote && greedy) { stream.next(); }
        state.tokenizer = tokenBase;
        return "string";
      } else if (nextChar === "#" && peekChar === "{") {
        state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
        stream.next();
        return "operator";
      } else {
        return "string";
      }
    }

    return stringTokenizer;
  }

  function buildInterpolationTokenizer(currentTokenizer) {
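    // Tokenizes the inside of a #{...} interpolation with tokenBase, then
    // restores whichever tokenizer was active when the interpolation began.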
    return function(stream, state) {
      if (stream.peek() === "}") {
        stream.next();
        state.tokenizer = currentTokenizer;
        return "operator";
      } else {
        return tokenBase(stream, state);
      }
    };
  }

  function indent(state) {
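    // Opens a new scope one indentUnit deeper than the innermost one.
    // indentCount is reset at the start of each line, so at most one
    // scope is opened per line.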
    if (state.indentCount == 0) {
      state.indentCount++;
      var lastScopeOffset = state.scopes[0].offset;
      var currentOffset = lastScopeOffset + config.indentUnit;
      state.scopes.unshift({ offset: currentOffset });
    }
  }

  function dedent(state) {
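    // Drops the innermost scope, unless it is the only one left.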
    if (state.scopes.length == 1) return;

    state.scopes.shift();
  }

  function tokenBase(stream, state) {
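    // Main tokenizer. state.cursorHalf tracks which side of a ":" we are on:
    // 0 = selectors and property names, 1 = property values.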
    var ch = stream.peek();

    // Comment
    if (stream.match("/*")) {
      state.tokenizer = comment(stream.indentation(), true);
      return state.tokenizer(stream, state);
    }
    if (stream.match("//")) {
      state.tokenizer = comment(stream.indentation(), false);
      return state.tokenizer(stream, state);
    }

    // Interpolation
    if (stream.match("#{")) {
      state.tokenizer = buildInterpolationTokenizer(tokenBase);
      return "operator";
    }

    // Strings
    if (ch === '"' || ch === "'") {
      stream.next();
      state.tokenizer = buildStringTokenizer(ch);
      return "string";
    }

    if (!state.cursorHalf) { // state.cursorHalf === 0
      // first half i.e. before : for key-value pairs
      // including selectors

      if (ch === ".") {
        stream.next();
        if (stream.match(/^[\w-]+/)) {
          indent(state);
          return "atom";
        } else if (stream.peek() === "#") {
          indent(state);
          return "atom";
        }
      }

      if (ch === "#") {
        stream.next();
        // ID selectors
        if (stream.match(/^[\w-]+/)) {
          indent(state);
          return "atom";
        }
        if (stream.peek() === "#") {
          indent(state);
          return "atom";
        }
      }

      // Variables
      if (ch === "$") {
        stream.next();
        stream.eatWhile(/[\w-]/);
        return "variable-2";
      }

      // Numbers
      if (stream.match(/^-?[0-9\.]+/))
        return "number";

      // Units
      if (stream.match(/^(px|em|in)\b/))
        return "unit";

      if (stream.match(keywordsRegexp))
        return "keyword";

      if (stream.match(/^url/) && stream.peek() === "(") {
        state.tokenizer = urlTokens;
        return "atom";
      }

      if (ch === "=") {
        // Match shortcut mixin definition
        if (stream.match(/^=[\w-]+/)) {
          indent(state);
          return "meta";
        }
      }

      if (ch === "+") {
        // Match shortcut mixin inclusion
        if (stream.match(/^\+[\w-]+/)) {
          return "variable-3";
        }
      }

      if (ch === "@") {
        if (stream.match(/@extend/)) {
          if (!stream.match(/\s*[\w]/))
            dedent(state);
        }
      }

      // Indent Directives
      if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)) {
        indent(state);
        return "meta";
      }

      // Other Directives
      if (ch === "@") {
        stream.next();
        stream.eatWhile(/[\w-]/);
        return "meta";
      }

      if (stream.eatWhile(/[\w-]/)) {
        if (stream.match(/ *: *[\w-\+\$#!\("']/, false)) {
          return "property";
        } else if (stream.match(/ *:/, false)) {
          indent(state);
          state.cursorHalf = 1;
          return "atom";
        } else if (stream.match(/ *,/, false)) {
          return "atom";
        } else {
          indent(state);
          return "atom";
        }
      }

      if (ch === ":") {
        if (stream.match(pseudoElementsRegexp)) { // could be a pseudo-element
          return "keyword";
        }
        stream.next();
        state.cursorHalf = 1;
        return "operator";
      }

    } // cursorHalf===0 ends here
    else {

      if (ch === "#") {
        stream.next();
        // Hex numbers
        if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)) {
          if (!stream.peek()) {
            state.cursorHalf = 0;
          }
          return "number";
        }
      }

      // Numbers
      if (stream.match(/^-?[0-9\.]+/)) {
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "number";
      }

      // Units
      if (stream.match(/^(px|em|in)\b/)) {
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "unit";
      }

      if (stream.match(keywordsRegexp)) {
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "keyword";
      }

      if (stream.match(/^url/) && stream.peek() === "(") {
        state.tokenizer = urlTokens;
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "atom";
      }

      // Variables
      if (ch === "$") {
        stream.next();
        stream.eatWhile(/[\w-]/);
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "variable-3";
      }

      // bang character for !important, !default, etc.
      if (ch === "!") {
        stream.next();
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return stream.match(/^[\w]+/) ? "keyword" : "operator";
      }

      if (stream.match(opRegexp)) {
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "operator";
      }

      // attributes
      if (stream.eatWhile(/[\w-]/)) {
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "attribute";
      }

      //stream.eatSpace();
      if (!stream.peek()) {
        state.cursorHalf = 0;
        return null;
      }

    } // else ends here

    if (stream.match(opRegexp))
      return "operator";

    // If we haven't returned by now, we move 1 character
    // and return no style
    stream.next();
    return null;
  }

  function tokenLexer(stream, state) {
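    // Runs the active tokenizer, dedents on "@return" and "}", and prunes
    // any scopes indented deeper than the current token's start column
    // (plus whatever indent was opened on this line).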
    if (stream.sol()) state.indentCount = 0;
    var style = state.tokenizer(stream, state);
    var current = stream.current();

    if (current === "@return" || current === "}") {
      dedent(state);
    }

    if (style !== null) {
      var startOfToken = stream.pos - current.length;

      var withCurrentIndent = startOfToken + (config.indentUnit * state.indentCount);

      var newScopes = [];

      for (var i = 0; i < state.scopes.length; i++) {
        var scope = state.scopes[i];

        if (scope.offset <= withCurrentIndent)
          newScopes.push(scope);
      }

      state.scopes = newScopes;
    }

    return style;
  }

  return {
    startState: function() {
      return {
        tokenizer: tokenBase,
        scopes: [{offset: 0, type: "sass"}],
        indentCount: 0,
        cursorHalf: 0,  // cursor half tells us if cursor lies after (1)
                        // or before (0) colon (well... more or less)
        definedVars: [],
        definedMixins: []
      };
    },
    token: function(stream, state) {
      var style = tokenLexer(stream, state);

      state.lastToken = { style: style, content: stream.current() };

      return style;
    },

    indent: function(state) {
      return state.scopes[0].offset;
    }
  };
});

CodeMirror.defineMIME("text/x-sass", "sass");
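// Usage sketch: once this file is loaded, the mode can be selected by name,
// e.g. CodeMirror.fromTextArea(textarea, { mode: "sass" }), or via the
// "text/x-sass" MIME type registered above.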

});