// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: http://codemirror.net/LICENSE

/**
 * Link to the project's GitHub page:
 * https://github.com/pickhardt/coffeescript-codemirror-mode
 */
(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";

CodeMirror.defineMode("coffeescript", function(conf, parserConf) {
  var ERRORCLASS = "error";

  function wordRegexp(words) {
    return new RegExp("^((" + words.join(")|(") + "))\\b");
  }

  var operators = /^(?:->|=>|\+[+=]?|-[\-=]?|\*[\*=]?|\/[\/=]?|[=!]=|<[><]?=?|>>?=?|%=?|&=?|\|=?|\^=?|\~|!|\?|(or|and|\|\||&&|\?)=)/;
  var delimiters = /^(?:[()\[\]{},:`=;]|\.\.?\.?)/;
  var identifiers = /^[_A-Za-z$][_A-Za-z$0-9]*/;
  var properties = /^(@|this\.)[_A-Za-z$][_A-Za-z$0-9]*/;

  var wordOperators = wordRegexp(["and", "or", "not",
                                  "is", "isnt", "in",
                                  "instanceof", "typeof"]);
  var indentKeywords = ["for", "while", "loop", "if", "unless", "else",
                        "switch", "try", "catch", "finally", "class"];
  var commonKeywords = ["break", "by", "continue", "debugger", "delete",
                        "do", "in", "of", "new", "return", "then",
                        "this", "@", "throw", "when", "until", "extends"];
  var keywords = wordRegexp(indentKeywords.concat(commonKeywords));

  indentKeywords = wordRegexp(indentKeywords);

  var stringPrefixes = /^('{3}|\"{3}|['\"])/;
  var regexPrefixes = /^(\/{3}|\/)/;
  var commonConstants = ["Infinity", "NaN", "undefined", "null", "true", "false", "on", "off", "yes", "no"];
  var constants = wordRegexp(commonConstants);

  // Tokenizers
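  // Main tokenizer. Consumes one token from the stream and returns its
  // style: handles indentation-driven scope changes at the start of a line,
  // then comments, numbers, strings, regex literals, operators, delimiters,
  // constants, keywords, identifiers and properties, in that order.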
  function tokenBase(stream, state) {
    // Handle scope changes
    if (stream.sol()) {
      if (state.scope.align === null) state.scope.align = false;

      var scopeOffset = state.scope.offset;
      if (stream.eatSpace()) {
        var lineOffset = stream.indentation();
        if (lineOffset > scopeOffset && state.scope.type == "coffee") {
          return "indent";
        } else if (lineOffset < scopeOffset) {
          return "dedent";
        }
        return null;
      } else {
        if (scopeOffset > 0) {
          dedent(stream, state);
        }
      }
    }
    if (stream.eatSpace()) {
      return null;
    }

    var ch = stream.peek();

    // Handle docco title comment (single line)
    if (stream.match("####")) {
      stream.skipToEnd();
      return "comment";
    }

    // Handle multi line comments
    if (stream.match("###")) {
      state.tokenize = longComment;
      return state.tokenize(stream, state);
    }

    // Single line comment
    if (ch === "#") {
      stream.skipToEnd();
      return "comment";
    }

    // Handle number literals
    if (stream.match(/^-?[0-9\.]/, false)) {
      var floatLiteral = false;
      // Floats
      if (stream.match(/^-?\d*\.\d+(e[\+\-]?\d+)?/i)) {
        floatLiteral = true;
      }
      if (stream.match(/^-?\d+\.\d*/)) {
        floatLiteral = true;
      }
      if (stream.match(/^-?\.\d+/)) {
        floatLiteral = true;
      }

      if (floatLiteral) {
        // prevent from getting extra . on 1..
        if (stream.peek() == "."){
          stream.backUp(1);
        }
        return "number";
      }
      // Integers
      var intLiteral = false;
      // Hex
      if (stream.match(/^-?0x[0-9a-f]+/i)) {
        intLiteral = true;
      }
      // Decimal
      if (stream.match(/^-?[1-9]\d*(e[\+\-]?\d+)?/)) {
        intLiteral = true;
      }
      // Zero by itself with no other piece of number.
      if (stream.match(/^-?0(?![\dx])/i)) {
        intLiteral = true;
      }
      if (intLiteral) {
        return "number";
      }
    }

    // Handle strings
    if (stream.match(stringPrefixes)) {
      state.tokenize = tokenFactory(stream.current(), false, "string");
      return state.tokenize(stream, state);
    }
    // Handle regex literals
    if (stream.match(regexPrefixes)) {
      if (stream.current() != "/" || stream.match(/^.*\//, false)) { // prevent highlight of division
        state.tokenize = tokenFactory(stream.current(), true, "string-2");
        return state.tokenize(stream, state);
      } else {
        stream.backUp(1);
      }
    }

    // Handle operators and delimiters
    if (stream.match(operators) || stream.match(wordOperators)) {
      return "operator";
    }
    if (stream.match(delimiters)) {
      return "punctuation";
    }

    if (stream.match(constants)) {
      return "atom";
    }

    if (stream.match(keywords)) {
      return "keyword";
    }

    if (stream.match(identifiers)) {
      return "variable";
    }

    if (stream.match(properties)) {
      return "property";
    }

    // Handle non-detected items
    stream.next();
    return ERRORCLASS;
  }
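  // Builds a tokenizer for a string or regex literal that runs until the
  // given delimiter. A single-line literal left open at end of line is
  // flagged as an error when parserConf.singleLineStringErrors is set;
  // otherwise control returns to tokenBase on the next token.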
  function tokenFactory(delimiter, singleline, outclass) {
    return function(stream, state) {
      while (!stream.eol()) {
        stream.eatWhile(/[^'"\/\\]/);
        if (stream.eat("\\")) {
          stream.next();
          if (singleline && stream.eol()) {
            return outclass;
          }
        } else if (stream.match(delimiter)) {
          state.tokenize = tokenBase;
          return outclass;
        } else {
          stream.eat(/['"\/]/);
        }
      }
      if (singleline) {
        if (parserConf.singleLineStringErrors) {
          outclass = ERRORCLASS;
        } else {
          state.tokenize = tokenBase;
        }
      }
      return outclass;
    };
  }
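  // Tokenizer for ### ... ### block comments.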
  function longComment(stream, state) {
    while (!stream.eol()) {
      stream.eatWhile(/[^#]/);
      if (stream.match("###")) {
        state.tokenize = tokenBase;
        break;
      }
      stream.eatWhile("#");
    }
    return "comment";
  }
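  // Pushes a new scope. Indentation scopes ("coffee") are offset by
  // conf.indentUnit from the enclosing scope; bracket scopes (recorded by
  // their closing character) align to the column just after the opener.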
  function indent(stream, state, type) {
    type = type || "coffee";
    var offset = 0, align = false, alignOffset = null;
    for (var scope = state.scope; scope; scope = scope.prev) {
      if (scope.type === "coffee" || scope.type == "}") {
        offset = scope.offset + conf.indentUnit;
        break;
      }
    }
    if (type !== "coffee") {
      align = null;
      alignOffset = stream.column() + stream.current().length;
    } else if (state.scope.align) {
      state.scope.align = false;
    }
    state.scope = {
      offset: offset,
      type: type,
      prev: state.scope,
      align: align,
      alignOffset: alignOffset
    };
  }
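  // Pops scopes until the stream's indentation matches an enclosing scope.
  // Returns true when the indentation matches no open scope (treated as an
  // error by the caller), and a falsy value otherwise.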
  function dedent(stream, state) {
    if (!state.scope.prev) return;
    if (state.scope.type === "coffee") {
      var _indent = stream.indentation();
      var matched = false;
      for (var scope = state.scope; scope; scope = scope.prev) {
        if (_indent === scope.offset) {
          matched = true;
          break;
        }
      }
      if (!matched) {
        return true;
      }
      while (state.scope.prev && state.scope.offset !== _indent) {
        state.scope = state.scope.prev;
      }
      return false;
    } else {
      state.scope = state.scope.prev;
      return false;
    }
  }
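  // Wraps state.tokenize with scope bookkeeping: indents after `->`/`=>` at
  // end of line, after opening brackets and after indenting keywords;
  // dedents on `then`, on closing brackets, on a drop in indentation and at
  // end of line after `return`.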
  function tokenLexer(stream, state) {
    var style = state.tokenize(stream, state);
    var current = stream.current();

    // Handle "." connected identifiers
    if (current === ".") {
      style = state.tokenize(stream, state);
      current = stream.current();
      if (/^\.[\w$]+$/.test(current)) {
        return "variable";
      } else {
        return ERRORCLASS;
      }
    }

    // Handle scope changes.
    if (current === "return") {
      state.dedent = true;
    }
    if (((current === "->" || current === "=>") &&
         !state.lambda &&
         !stream.peek())
        || style === "indent") {
      indent(stream, state);
    }
    var delimiter_index = "[({".indexOf(current);
    if (delimiter_index !== -1) {
      indent(stream, state, "])}".slice(delimiter_index, delimiter_index+1));
    }
    if (indentKeywords.exec(current)){
      indent(stream, state);
    }
    if (current == "then"){
      dedent(stream, state);
    }

    if (style === "dedent") {
      if (dedent(stream, state)) {
        return ERRORCLASS;
      }
    }
    delimiter_index = "])}".indexOf(current);
    if (delimiter_index !== -1) {
      while (state.scope.type == "coffee" && state.scope.prev)
        state.scope = state.scope.prev;
      if (state.scope.type == current)
        state.scope = state.scope.prev;
    }
    if (state.dedent && stream.eol()) {
      if (state.scope.type == "coffee" && state.scope.prev)
        state.scope = state.scope.prev;
      state.dedent = false;
    }

    return style;
  }
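  // The mode object handed back to CodeMirror: start state, per-token
  // styling via tokenLexer, and indentation and folding hints.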
  var external = {
    startState: function(basecolumn) {
      return {
        tokenize: tokenBase,
        scope: {offset:basecolumn || 0, type:"coffee", prev: null, align: false},
        lastToken: null,
        lambda: false,
        dedent: 0
      };
    },

    token: function(stream, state) {
      var fillAlign = state.scope.align === null && state.scope;
      if (fillAlign && stream.sol()) fillAlign.align = false;

      var style = tokenLexer(stream, state);
      if (fillAlign && style && style != "comment") fillAlign.align = true;

      state.lastToken = {style:style, content: stream.current()};
      if (stream.eol() && state.lambda) {
        state.lambda = false;
      }
      return style;
    },

    indent: function(state, text) {
      if (state.tokenize != tokenBase) return 0;
      var scope = state.scope;
      var closer = text && "])}".indexOf(text.charAt(0)) > -1;
      if (closer) while (scope.type == "coffee" && scope.prev) scope = scope.prev;
      var closes = closer && scope.type === text.charAt(0);
      if (scope.align)
        return scope.alignOffset - (closes ? 1 : 0);
      else
        return (closes ? scope.prev : scope).offset;
    },

    lineComment: "#",
    fold: "indent"
  };
  return external;
});

CodeMirror.defineMIME("text/x-coffeescript", "coffeescript");

});
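
// Example usage (a sketch, not part of the mode itself; assumes CodeMirror 5
// is loaded and the page contains a <textarea id="code">):
//
//   var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
//     mode: "text/x-coffeescript",
//     lineNumbers: true
//   });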