OLD | NEW |
1 /* | 1 /* |
2 * This file is part of Adblock Plus <https://adblockplus.org/>, | 2 * This file is part of Adblock Plus <https://adblockplus.org/>, |
3 * Copyright (C) 2006-2017 eyeo GmbH | 3 * Copyright (C) 2006-2017 eyeo GmbH |
4 * | 4 * |
5 * Adblock Plus is free software: you can redistribute it and/or modify | 5 * Adblock Plus is free software: you can redistribute it and/or modify |
6 * it under the terms of the GNU General Public License version 3 as | 6 * it under the terms of the GNU General Public License version 3 as |
7 * published by the Free Software Foundation. | 7 * published by the Free Software Foundation. |
8 * | 8 * |
9 * Adblock Plus is distributed in the hope that it will be useful, | 9 * Adblock Plus is distributed in the hope that it will be useful, |
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of | 10 * but WITHOUT ANY WARRANTY; without even the implied warranty of |
(...skipping 29 matching lines...) |
40 const rawRequestTypes = typeMap.XMLHTTPREQUEST | | 40 const rawRequestTypes = typeMap.XMLHTTPREQUEST | |
41 typeMap.WEBSOCKET | | 41 typeMap.WEBSOCKET | |
42 typeMap.WEBRTC | | 42 typeMap.WEBRTC | |
43 typeMap.OBJECT_SUBREQUEST | | 43 typeMap.OBJECT_SUBREQUEST | |
44 typeMap.PING | | 44 typeMap.PING | |
45 typeMap.OTHER; | 45 typeMap.OTHER; |
46 const whitelistableRequestTypes = httpRequestTypes | | 46 const whitelistableRequestTypes = httpRequestTypes | |
47 typeMap.WEBSOCKET | | 47 typeMap.WEBSOCKET | |
48 typeMap.WEBRTC; | 48 typeMap.WEBRTC; |
49 | 49 |
| 50 function callLater(func) |
| 51 { |
| 52 return new Promise(resolve => |
| 53 { |
| 54 let call = () => resolve(func()); |
| 55 |
| 56 // If this looks like Node.js, call process.nextTick, otherwise call |
| 57 // setTimeout. |
| 58 if (typeof process != "undefined") |
| 59 process.nextTick(call); |
| 60 else |
| 61 setTimeout(call, 0); |
| 62 }); |
| 63 } |
| 64 |
| 65 function async(callees, mapFunction) |
| 66 { |
| 67 if (!(Symbol.iterator in callees)) |
| 68 callees = [callees]; |
| 69 |
| 70 let lastPause = Date.now(); |
| 71 let index = 0; |
| 72 |
| 73 let promise = Promise.resolve(); |
| 74 |
| 75 for (let next of callees) |
| 76 { |
| 77 let currentIndex = index; |
| 78 |
| 79 promise = promise.then(() => |
| 80 { |
| 81 if (mapFunction) |
| 82 next = mapFunction(next, currentIndex); |
| 83 |
| 84 // If it has been 100ms or longer since the last call, take a pause. This |
| 85 // keeps the browser from freezing up. |
| 86 let now = Date.now(); |
| 87 if (now - lastPause >= 100) |
| 88 { |
| 89 lastPause = now; |
| 90 return callLater(next); |
| 91 } |
| 92 |
| 93 return next(); |
| 94 }); |
| 95 |
| 96 index++; |
| 97 } |
| 98 |
| 99 return promise; |
| 100 } |
| 101 |
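A rough usage sketch of the async() helper above (items and doExpensiveWork are hypothetical, not part of the patch): mapFunction maps each callee to a thunk, and the promise chain yields to the event loop whenever 100ms have elapsed since the last pause.

    // Process a large list sequentially without freezing the browser.
    async(items, (item, index) => () => doExpensiveWork(item, index))
      .then(() => console.log("all items processed"));
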
50 function parseDomains(domains, included, excluded) | 102 function parseDomains(domains, included, excluded) |
51 { | 103 { |
52 for (let domain in domains) | 104 for (let domain in domains) |
53 { | 105 { |
54 if (domain != "") | 106 if (domain != "") |
55 { | 107 { |
56 let enabled = domains[domain]; | 108 let enabled = domains[domain]; |
57 domain = punycode.toASCII(domain.toLowerCase()); | 109 domain = punycode.toASCII(domain.toLowerCase()); |
58 | 110 |
59 if (!enabled) | 111 if (!enabled) |
(...skipping 555 matching lines...) |
615 selector: selector} | 667 selector: selector} |
616 }; | 668 }; |
617 | 669 |
618 if (unlessDomain) | 670 if (unlessDomain) |
619 rule.trigger["unless-domain"] = unlessDomain; | 671 rule.trigger["unless-domain"] = unlessDomain; |
620 | 672 |
621 rules.push(rule); | 673 rules.push(rule); |
622 } | 674 } |
623 } | 675 } |
624 | 676 |
| 677 /** |
| 678 * Check if two strings are a close match |
| 679 * |
| 680 * This function returns an edit operation, one of "substitute", "delete", and |
| 681 * "insert", along with an index in the source string where the edit must occur |
| 682 * in order to arrive at the target string. If the strings are not a close |
| 683 * match, it returns null. |
| 684 * |
| 685 * Two strings are considered to be a close match if they are one edit |
| 686 * operation apart. |
| 687 * |
| 688 * Deletions or insertions of a contiguous range of characters from one string |
| 689 * into the other, at the same index, are treated as a single edit. For |
| 690 * example, "internal" and "international" are considered to be one edit apart |
| 691 * and therefore a close match. |
| 692 * |
| 693 * A few things to note: |
| 694 * |
| 695 * 1) This function does not care about the format of the input strings. For |
| 696 * example, the caller may pass in regular expressions, where "[ab]" and |
| 697 * "[bc]" could be considered to be a close match, since the order within the |
| 698 * brackets doesn't matter. This function will still return null for this set |
| 699 * of inputs since they are two edits apart. |
| 700 * |
| 701 * 2) To be friendly to calling code that might be passing in regular |
| 702 * expressions, this function will simply return null if it encounters a |
| 703 * special character (e.g. "\", "?", "+", etc.) in the delta. For example, |
| 704 * given "Hello" and "Hello, how are you?", it will return null. |
| 705 * |
| 706 * 3) If the caller does indeed pass in regular expressions, it must make the |
| 707 * important assumption that the parts where two such regular expressions may |
| 708 * differ can always be treated as normal strings. For example, |
| 709 * "^https?://example.com/ads" and "^https?://example.com/adv" differ only in |
| 710 * the last character, therefore the regular expressions can safely be merged |
| 711 * into "^https?://example.com/ad[sv]". |
| 712 * |
| 713 * @param {string} s The source string |
| 714 * @param {string} t The target string |
| 715 * |
| 716 * @returns {?object} An object describing the single edit operation that must |
| 717 * occur in the source string to arrive at the target string, |
| 718 * or null if the strings are not a close match |
| 719 */ |
| 720 function closeMatch(s, t) |
| 721 { |
| 722 let diff = s.length - t.length; |
| 723 |
| 724 // If target is longer than source, swap them for the purpose of our |
| 725 // calculation. |
| 726 if (diff < 0) |
| 727 { |
| 728 let tmp = s; |
| 729 s = t; |
| 730 t = tmp; |
| 731 } |
| 732 |
| 733 let edit = null; |
| 734 |
| 735 let i = 0; |
| 736 let j = 0; |
| 737 |
| 738 // Start from the beginning and keep going until we hit a character that |
| 739 // doesn't match. |
| 740 for (; i < s.length; i++) |
| 741 { |
| 742 if (s[i] != t[i]) |
| 743 break; |
| 744 } |
| 745 |
| 746 // Now do exactly the same from the end, but also stop if we reach the |
| 747 // position where we terminated the previous loop. |
| 748 for (; j < t.length; j++) |
| 749 { |
| 750 if (t.length - j == i || s[s.length - j - 1] != t[t.length - j - 1]) |
| 751 break; |
| 752 } |
| 753 |
| 754 if (diff == 0) |
| 755 { |
| 756 // If the strings are equal in length and the delta isn't exactly one |
| 757 // character, it's not a close match. |
| 758 if (t.length - j - i != 1) |
| 759 return null; |
| 760 } |
| 761 else if (i != t.length - j) |
| 762 { |
| 763 // For strings of unequal length, if we haven't found a match for every |
| 764 // single character in the shorter string counting from both the beginning |
| 765 // and the end, it's not a close match. |
| 766 return null; |
| 767 } |
| 768 |
| 769 for (let k = i; k < s.length - j; k++) |
| 770 { |
| 771 // If the delta contains any special characters, it's not a close match. |
| 772 if (s[k] == "." || s[k] == "+" || s[k] == "$" || s[k] == "?" || |
| 773 s[k] == "{" || s[k] == "}" || s[k] == "(" || s[k] == ")" || |
| 774 s[k] == "[" || s[k] == "]" || s[k] == "\\") |
| 775 return null; |
| 776 } |
| 777 |
| 778 if (diff == 0) |
| 779 { |
| 780 edit = {type: "substitute", index: i}; |
| 781 } |
| 782 else if (diff > 0) |
| 783 { |
| 784 edit = {type: "delete", index: i}; |
| 785 |
| 786 if (diff > 1) |
| 787 edit.endIndex = s.length - j; |
| 788 } |
| 789 else |
| 790 { |
| 791 edit = {type: "insert", index: i}; |
| 792 |
| 793 if (diff < -1) |
| 794 edit.endIndex = s.length - j; |
| 795 } |
| 796 |
| 797 return edit; |
| 798 } |
| 799 |
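To make the contract concrete, here is what closeMatch() returns for the examples used in the comments above and below:

    closeMatch("ads", "adv");   // {type: "substitute", index: 2}
    closeMatch("ads", "ad");    // {type: "delete", index: 2}
    closeMatch("ad", "ads");    // {type: "insert", index: 2}
    closeMatch("[ab]", "[bc]"); // null: two edits apart
    closeMatch("Hello", "Hello, how are you?"); // null: delta contains "?"
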
| 800 function eliminateRedundantRulesByURLFilter(rulesInfo, exhaustive) |
| 801 { |
| 802 const heuristicRange = 1000; |
| 803 |
| 804 let ol = rulesInfo.length; |
| 805 |
| 806 // Throw out obviously redundant rules. |
| 807 return async(rulesInfo, (ruleInfo, index) => () => |
| 808 { |
| 809 // If this rule is already marked as redundant, don't bother comparing it |
| 810 // with other rules. |
| 811 if (rulesInfo[index].redundant) |
| 812 return; |
| 813 |
| 814 let limit = exhaustive ? rulesInfo.length : |
| 815 Math.min(index + heuristicRange, rulesInfo.length); |
| 816 |
| 817 for (let i = index, j = i + 1; j < limit; j++) |
| 818 { |
| 819 if (rulesInfo[j].redundant) |
| 820 continue; |
| 821 |
| 822 let source = rulesInfo[i].rule.trigger["url-filter"]; |
| 823 let target = rulesInfo[j].rule.trigger["url-filter"]; |
| 824 |
| 825 if (source.length >= target.length) |
| 826 { |
| 827 // If one URL filter is a substring of the other starting at the |
| 828 // beginning, the other one is clearly redundant. |
| 829 if (source.substring(0, target.length) == target) |
| 830 { |
| 831 rulesInfo[i].redundant = true; |
| 832 break; |
| 833 } |
| 834 } |
| 835 else if (target.substring(0, source.length) == source) |
| 836 { |
| 837 rulesInfo[j].redundant = true; |
| 838 } |
| 839 } |
| 840 }) |
| 841 .then(() => rulesInfo.filter(ruleInfo => !ruleInfo.redundant)); |
| 842 } |
| 843 |
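For illustration, the prefix check above keeps only the shorter of two otherwise identical rules, since its URL filter already matches a superset of the URLs matched by the longer one (hypothetical filters):

    // {"url-filter": "^https?://example\\.com/ads/banner"}  -> marked redundant
    // {"url-filter": "^https?://example\\.com/ads/"}        -> kept
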
| 844 function findMatchesForRuleByURLFilter(rulesInfo, index, exhaustive) |
| 845 { |
| 846 // Closely matching rules are likely to be within a certain range. We only |
| 847 // look for matches within this range by default. If we increase this value, |
| 848 // it can give us more matches and a smaller resulting rule set, but possibly |
| 849 // at a significant performance cost. |
| 850 // |
| 851 // If the exhaustive option is true, we simply ignore this value and look for |
| 852 // matches throughout the rule set. |
| 853 const heuristicRange = 1000; |
| 854 |
| 855 let limit = exhaustive ? rulesInfo.length : |
| 856 Math.min(index + heuristicRange, rulesInfo.length); |
| 857 |
| 858 for (let i = index, j = i + 1; j < limit; j++) |
| 859 { |
| 860 let source = rulesInfo[i].rule.trigger["url-filter"]; |
| 861 let target = rulesInfo[j].rule.trigger["url-filter"]; |
| 862 |
| 863 let edit = closeMatch(source, target); |
| 864 |
| 865 if (edit) |
| 866 { |
| 867 let urlFilter, ruleInfo, match = {edit}; |
| 868 |
| 869 if (edit.type == "insert") |
| 870 { |
| 871 // Convert the insertion into a deletion and stick it on the target |
| 872 // rule instead. We can only group deletions and substitutions; |
| 873 // therefore insertions must be treated as deletions on the target |
| 874 // rule. |
| 875 urlFilter = target; |
| 876 ruleInfo = rulesInfo[j]; |
| 877 match.index = i; |
| 878 edit.type = "delete"; |
| 879 } |
| 880 else |
| 881 { |
| 882 urlFilter = source; |
| 883 ruleInfo = rulesInfo[i]; |
| 884 match.index = j; |
| 885 } |
| 886 |
| 887 // If the edit has an end index, it represents a multiple character |
| 888 // edit. |
| 889 let multiEdit = !!edit.endIndex; |
| 890 |
| 891 if (multiEdit) |
| 892 { |
| 893 // We only care about a single multiple character edit because the |
| 894 // number of characters for such a match doesn't matter; we can |
| 895 // only merge with one other rule. |
| 896 if (!ruleInfo.multiEditMatch) |
| 897 ruleInfo.multiEditMatch = match; |
| 898 } |
| 899 else |
| 900 { |
| 901 // For single character edits, multiple rules can be merged into |
| 902 // one. e.g. "ad", "ads", and "adv" can be merged into "ad[sv]?". |
| 903 if (!ruleInfo.matches) |
| 904 ruleInfo.matches = new Array(urlFilter.length); |
| 905 |
| 906 // Matches at a particular index. For example, for a source string |
| 907 // "ads", both target strings "ad" (deletion) and "adv" |
| 908 // (substitution) match at index 2, hence they are grouped together |
| 909 // to possibly be merged later into "ad[sv]?". |
| 910 let matchesForIndex = ruleInfo.matches[edit.index]; |
| 911 |
| 912 if (matchesForIndex) |
| 913 { |
| 914 matchesForIndex.push(match); |
| 915 } |
| 916 else |
| 917 { |
| 918 matchesForIndex = [match]; |
| 919 ruleInfo.matches[edit.index] = matchesForIndex; |
| 920 } |
| 921 |
| 922 // Keep track of the best set of matches. We later sort by this to |
| 923 // get best results. |
| 924 if (!ruleInfo.bestMatches || |
| 925 matchesForIndex.length > ruleInfo.bestMatches.length) |
| 926 ruleInfo.bestMatches = matchesForIndex; |
| 927 } |
| 928 } |
| 929 } |
| 930 } |
| 931 |
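A rough sketch of the bookkeeping above, using the "ads"/"ad"/"adv" example from the comments: the deletion and the substitution both land at edit index 2, so they are grouped on the "ads" rule (rule indices shown as placeholders):

    // rulesInfo[i].matches[2] == [
    //   {edit: {type: "delete", index: 2}, index: indexOfAdRule},
    //   {edit: {type: "substitute", index: 2}, index: indexOfAdvRule}
    // ]
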
| 932 function mergeCandidateRulesByURLFilter(rulesInfo) |
| 933 { |
| 934 // Filter out rules that have no matches at all. |
| 935 let candidateRulesInfo = rulesInfo.filter(ruleInfo => |
| 936 { |
| 937 return ruleInfo.bestMatches || ruleInfo.multiEditMatch |
| 938 }); |
| 939 |
| 940 // For best results, we have to sort the candidates by the largest set of |
| 941 // matches. |
| 942 // |
| 943 // For example, we want "ads", "bds", "adv", "bdv", "adx", and "bdx" to |
| 944 // generate "ad[svx]" and "bd[svx]" (2 rules), not "[ab]ds", "[ab]dv", and |
| 945 // "[ab]dx" (3 rules). |
| 946 candidateRulesInfo.sort((ruleInfo1, ruleInfo2) => |
| 947 { |
| 948 let weight1 = ruleInfo1.bestMatches ? ruleInfo1.bestMatches.length : |
| 949 ruleInfo1.multiEditMatch ? 1 : 0; |
| 950 let weight2 = ruleInfo2.bestMatches ? ruleInfo2.bestMatches.length : |
| 951 ruleInfo2.multiEditMatch ? 1 : 0; |
| 952 |
| 953 return weight2 - weight1; |
| 954 }); |
| 955 |
| 956 for (let ruleInfo of candidateRulesInfo) |
| 957 { |
| 958 let rule = ruleInfo.rule; |
| 959 |
| 960 // If this rule has already been merged into another rule, we skip it. |
| 961 if (ruleInfo.merged) |
| 962 continue; |
| 963 |
| 964 // Find the best set of rules to group, which is simply the largest set. |
| 965 let best = (ruleInfo.matches || []).reduce((best, matchesForIndex) => |
| 966 { |
| 967 matchesForIndex = (matchesForIndex || []).filter(match => |
| 968 { |
| 969 // Filter out rules that have either already been merged into other |
| 970 // rules or have had other rules merged into them. |
| 971 return !rulesInfo[match.index].merged && |
| 972 !rulesInfo[match.index].mergedInto; |
| 973 }); |
| 974 |
| 975 return matchesForIndex.length > best.length ? matchesForIndex : best; |
| 976 }, |
| 977 []); |
| 978 |
| 979 let multiEdit = false; |
| 980 |
| 981 // If we couldn't find a single rule to merge with, let's see if we have a |
| 982 // multiple character edit. e.g. we could merge "ad" and "adserver" into |
| 983 // "ad(server)?". |
| 984 if (best.length == 0 && ruleInfo.multiEditMatch && |
| 985 !rulesInfo[ruleInfo.multiEditMatch.index].merged && |
| 986 !rulesInfo[ruleInfo.multiEditMatch.index].mergedInto) |
| 987 { |
| 988 best = [ruleInfo.multiEditMatch]; |
| 989 multiEdit = true; |
| 990 } |
| 991 |
| 992 if (best.length > 0) |
| 993 { |
| 994 let urlFilter = rule.trigger["url-filter"]; |
| 995 |
| 996 let editIndex = best[0].edit.index; |
| 997 |
| 998 if (!multiEdit) |
| 999 { |
| 1000 // Merge all the matching rules into this one. |
| 1001 |
| 1002 let characters = [urlFilter[editIndex]]; |
| 1003 let quantifier = ""; |
| 1004 |
| 1005 for (let match of best) |
| 1006 { |
| 1007 if (match.edit.type == "delete") |
| 1008 { |
| 1009 quantifier = "?"; |
| 1010 } |
| 1011 else |
| 1012 { |
| 1013 let character = rulesInfo[match.index].rule |
| 1014 .trigger["url-filter"][editIndex]; |
| 1015 |
| 1016 // Insert any hyphen at the beginning so it gets interpreted as a |
| 1017 // literal hyphen. |
| 1018 if (character == "-") |
| 1019 characters.unshift(character); |
| 1020 else |
| 1021 characters.push(character); |
| 1022 } |
| 1023 |
| 1024 // Mark the target rule as merged so other rules don't try to merge |
| 1025 // it again. |
| 1026 rulesInfo[match.index].merged = true; |
| 1027 } |
| 1028 |
| 1029 urlFilter = urlFilter.substring(0, editIndex + 1) + quantifier + |
| 1030 urlFilter.substring(editIndex + 1); |
| 1031 if (characters.length > 1) |
| 1032 { |
| 1033 urlFilter = urlFilter.substring(0, editIndex) + "[" + |
| 1034 characters.join("") + "]" + |
| 1035 urlFilter.substring(editIndex + 1); |
| 1036 } |
| 1037 } |
| 1038 else |
| 1039 { |
| 1040 let editEndIndex = best[0].edit.endIndex; |
| 1041 |
| 1042 // Mark the target rule as merged so other rules don't try to merge it |
| 1043 // again. |
| 1044 rulesInfo[best[0].index].merged = true; |
| 1045 |
| 1046 urlFilter = urlFilter.substring(0, editIndex) + "(" + |
| 1047 urlFilter.substring(editIndex, editEndIndex) + ")?" + |
| 1048 urlFilter.substring(editEndIndex); |
| 1049 } |
| 1050 |
| 1051 rule.trigger["url-filter"] = urlFilter; |
| 1052 |
| 1053 // Mark this rule as one that has had other rules merged into it. |
| 1054 ruleInfo.mergedInto = true; |
| 1055 } |
| 1056 } |
| 1057 } |
| 1058 |
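Putting the two branches above together, the merge produces URL filters along these lines (both examples are taken from the code comments):

    // Single-character edits:    "ad", "ads", "adv"  ->  "ad[sv]?"
    // Multiple-character edit:   "ad", "adserver"    ->  "ad(server)?"
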
| 1059 function mergeRulesByURLFilter(rulesInfo, exhaustive) |
| 1060 { |
| 1061 return async(rulesInfo, (ruleInfo, index) => () => |
| 1062 findMatchesForRuleByURLFilter(rulesInfo, index, exhaustive) |
| 1063 ) |
| 1064 .then(() => mergeCandidateRulesByURLFilter(rulesInfo)); |
| 1065 } |
| 1066 |
| 1067 function mergeRulesByArrayProperty(rulesInfo, propertyType, property) |
| 1068 { |
| 1069 if (rulesInfo.length <= 1) |
| 1070 return; |
| 1071 |
| 1072 let valueSet = new Set(rulesInfo[0].rule[propertyType][property]); |
| 1073 |
| 1074 for (let i = 1; i < rulesInfo.length; i++) |
| 1075 { |
| 1076 for (let value of rulesInfo[i].rule[propertyType][property] || []) |
| 1077 valueSet.add(value); |
| 1078 |
| 1079 rulesInfo[i].merged = true; |
| 1080 } |
| 1081 |
| 1082 if (valueSet.size > 0) |
| 1083 rulesInfo[0].rule[propertyType][property] = Array.from(valueSet); |
| 1084 |
| 1085 rulesInfo[0].mergedInto = true; |
| 1086 } |
| 1087 |
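A quick illustration of mergeRulesByArrayProperty() with a hypothetical "if-domain" group: the values are folded into a Set on the first rule and the remaining rules are marked as merged.

    // before: "if-domain": ["a.example"]  and  "if-domain": ["b.example", "a.example"]
    // after:  first rule gets "if-domain": ["a.example", "b.example"]; the other is dropped
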
| 1088 function groupRulesByMergeableProperty(rulesInfo, propertyType, property) |
| 1089 { |
| 1090 let mergeableRulesInfoByGroup = new Map(); |
| 1091 |
| 1092 for (let ruleInfo of rulesInfo) |
| 1093 { |
| 1094 let copy = { |
| 1095 trigger: Object.assign({}, ruleInfo.rule.trigger), |
| 1096 action: Object.assign({}, ruleInfo.rule.action) |
| 1097 }; |
| 1098 |
| 1099 delete copy[propertyType][property]; |
| 1100 |
| 1101 let groupKey = JSON.stringify(copy); |
| 1102 |
| 1103 let mergeableRulesInfo = mergeableRulesInfoByGroup.get(groupKey); |
| 1104 |
| 1105 if (mergeableRulesInfo) |
| 1106 mergeableRulesInfo.push(ruleInfo); |
| 1107 else |
| 1108 mergeableRulesInfoByGroup.set(groupKey, [ruleInfo]); |
| 1109 } |
| 1110 |
| 1111 return mergeableRulesInfoByGroup; |
| 1112 } |
| 1113 |
| 1114 function mergeRules(rules, exhaustive) |
| 1115 { |
| 1116 let rulesInfo = rules.map(rule => ({rule})); |
| 1117 |
| 1118 let arrayPropertiesToMergeBy = ["resource-type", "if-domain"]; |
| 1119 |
| 1120 return async(() => |
| 1121 { |
| 1122 let map = groupRulesByMergeableProperty(rulesInfo, "trigger", "url-filter"); |
| 1123 return async(map.values(), mergeableRulesInfo => () => |
| 1124 eliminateRedundantRulesByURLFilter(mergeableRulesInfo, exhaustive) |
| 1125 .then(rulesInfo => mergeRulesByURLFilter(rulesInfo, exhaustive)) |
| 1126 ) |
| 1127 .then(() => |
| 1128 { |
| 1129 // Filter out rules that are redundant or have been merged into other |
| 1130 // rules. |
| 1131 rulesInfo = rulesInfo.filter(ruleInfo => !ruleInfo.redundant && |
| 1132 !ruleInfo.merged); |
| 1133 }); |
| 1134 }) |
| 1135 .then(() => async(arrayPropertiesToMergeBy, arrayProperty => () => |
| 1136 { |
| 1137 let map = groupRulesByMergeableProperty(rulesInfo, "trigger", |
| 1138 arrayProperty); |
| 1139 return async(map.values(), mergeableRulesInfo => () => |
| 1140 mergeRulesByArrayProperty(mergeableRulesInfo, "trigger", arrayProperty) |
| 1141 ) |
| 1142 .then(() => |
| 1143 { |
| 1144 rulesInfo = rulesInfo.filter(ruleInfo => !ruleInfo.merged); |
| 1145 }); |
| 1146 })) |
| 1147 .then(() => rulesInfo.map(ruleInfo => ruleInfo.rule)); |
| 1148 } |
| 1149 |
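mergeRules() resolves asynchronously, so a caller would presumably consume it along these lines (sketch; rules is a hypothetical array of content blocker rules):

    mergeRules(rules, /* exhaustive */ false)
      .then(merged => console.log(merged.length + " rules after merging"));
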
625 let ContentBlockerList = | 1150 let ContentBlockerList = |
626 /** | 1151 /** |
627 * Create a new Adblock Plus filter to content blocker list converter | 1152 * Create a new Adblock Plus filter to content blocker list converter |
628 * | 1153 * |
| 1154 * @param {object} options Options for content blocker list generation |
| 1155 * |
629 * @constructor | 1156 * @constructor |
630 */ | 1157 */ |
631 exports.ContentBlockerList = function () | 1158 exports.ContentBlockerList = function (options) |
632 { | 1159 { |
| 1160 const defaultOptions = { |
| 1161 merge: "auto" |
| 1162 }; |
| 1163 |
| 1164 this.options = Object.assign({}, defaultOptions, options); |
| 1165 |
633 this.requestFilters = []; | 1166 this.requestFilters = []; |
634 this.requestExceptions = []; | 1167 this.requestExceptions = []; |
635 this.elemhideFilters = []; | 1168 this.elemhideFilters = []; |
636 this.elemhideExceptions = []; | 1169 this.elemhideExceptions = []; |
637 this.genericblockExceptions = []; | 1170 this.genericblockExceptions = []; |
638 this.generichideExceptions = []; | 1171 this.generichideExceptions = []; |
639 this.elemhideSelectorExceptions = new Map(); | 1172 this.elemhideSelectorExceptions = new Map(); |
640 }; | 1173 }; |
641 | 1174 |
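With the new options parameter, constructing the converter would presumably look like this (module path assumed for illustration; omitting options keeps the "auto" default):

    let {ContentBlockerList} = require("./lib/abp2blocklist");
    let list = new ContentBlockerList({merge: "all"});  // always merge
    let auto = new ContentBlockerList();                // merge only above 50000 rules
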
642 /** | 1175 /** |
(...skipping 34 matching lines...) |
677 let domains = this.elemhideSelectorExceptions[filter.selector]; | 1210 let domains = this.elemhideSelectorExceptions[filter.selector]; |
678 if (!domains) | 1211 if (!domains) |
679 domains = this.elemhideSelectorExceptions[filter.selector] = []; | 1212 domains = this.elemhideSelectorExceptions[filter.selector] = []; |
680 | 1213 |
681 parseDomains(filter.domains, domains, []); | 1214 parseDomains(filter.domains, domains, []); |
682 } | 1215 } |
683 }; | 1216 }; |
684 | 1217 |
685 /** | 1218 /** |
686 * Generate content blocker list for all filters that were added | 1219 * Generate content blocker list for all filters that were added |
687 * | |
688 * @returns {Filter} filter Filter to convert | |
689 */ | 1220 */ |
690 ContentBlockerList.prototype.generateRules = function(filter) | 1221 ContentBlockerList.prototype.generateRules = function() |
691 { | 1222 { |
692 let rules = []; | 1223 let cssRules = []; |
| 1224 let cssExceptionRules = []; |
| 1225 let blockingRules = []; |
| 1226 let blockingExceptionRules = []; |
| 1227 |
| 1228 let ruleGroups = [cssRules, cssExceptionRules, |
| 1229 blockingRules, blockingExceptionRules]; |
693 | 1230 |
694 let genericSelectors = []; | 1231 let genericSelectors = []; |
695 let groupedElemhideFilters = new Map(); | 1232 let groupedElemhideFilters = new Map(); |
696 | 1233 |
697 for (let filter of this.elemhideFilters) | 1234 for (let filter of this.elemhideFilters) |
698 { | 1235 { |
699 let result = convertElemHideFilter(filter, this.elemhideSelectorExceptions); | 1236 let result = convertElemHideFilter(filter, this.elemhideSelectorExceptions); |
700 if (!result) | 1237 if (!result) |
701 continue; | 1238 continue; |
702 | 1239 |
(...skipping 26 matching lines...) |
729 // --max_old_space_size=4096 | 1266 // --max_old_space_size=4096 |
730 let elemhideExceptionDomains = extractFilterDomains(this.elemhideExceptions); | 1267 let elemhideExceptionDomains = extractFilterDomains(this.elemhideExceptions); |
731 | 1268 |
732 let genericSelectorExceptionDomains = | 1269 let genericSelectorExceptionDomains = |
733 extractFilterDomains(this.generichideExceptions); | 1270 extractFilterDomains(this.generichideExceptions); |
734 elemhideExceptionDomains.forEach(name => | 1271 elemhideExceptionDomains.forEach(name => |
735 { | 1272 { |
736 genericSelectorExceptionDomains.add(name); | 1273 genericSelectorExceptionDomains.add(name); |
737 }); | 1274 }); |
738 | 1275 |
739 addCSSRules(rules, genericSelectors, null, genericSelectorExceptionDomains); | 1276 addCSSRules(cssRules, genericSelectors, null, |
| 1277 genericSelectorExceptionDomains); |
740 | 1278 |
741 // Filter out whitelisted domains. | 1279 // Filter out whitelisted domains. |
742 elemhideExceptionDomains.forEach(domain => | 1280 elemhideExceptionDomains.forEach(domain => |
743 groupedElemhideFilters.delete(domain)); | 1281 groupedElemhideFilters.delete(domain)); |
744 | 1282 |
745 groupedElemhideFilters.forEach((selectors, matchDomain) => | 1283 groupedElemhideFilters.forEach((selectors, matchDomain) => |
746 { | 1284 { |
747 addCSSRules(rules, selectors, matchDomain, elemhideExceptionDomains); | 1285 addCSSRules(cssRules, selectors, matchDomain, elemhideExceptionDomains); |
748 }); | 1286 }); |
749 | 1287 |
750 let requestFilterExceptionDomains = []; | 1288 let requestFilterExceptionDomains = []; |
751 for (let filter of this.genericblockExceptions) | 1289 for (let filter of this.genericblockExceptions) |
752 { | 1290 { |
753 let parsed = parseFilterRegexpSource(filter.regexpSource); | 1291 let parsed = parseFilterRegexpSource(filter.regexpSource); |
754 if (parsed.hostname) | 1292 if (parsed.hostname) |
755 requestFilterExceptionDomains.push(parsed.hostname); | 1293 requestFilterExceptionDomains.push(parsed.hostname); |
756 } | 1294 } |
757 | 1295 |
758 for (let filter of this.requestFilters) | 1296 for (let filter of this.requestFilters) |
759 { | 1297 { |
760 convertFilterAddRules(rules, filter, "block", true, | 1298 convertFilterAddRules(blockingRules, filter, "block", true, |
761 requestFilterExceptionDomains); | 1299 requestFilterExceptionDomains); |
762 } | 1300 } |
763 | 1301 |
764 for (let filter of this.requestExceptions) | 1302 for (let filter of this.requestExceptions) |
765 convertFilterAddRules(rules, filter, "ignore-previous-rules", true); | 1303 { |
| 1304 convertFilterAddRules(blockingExceptionRules, filter, |
| 1305 "ignore-previous-rules", true); |
| 1306 } |
766 | 1307 |
767 return rules; | 1308 return async(ruleGroups, (group, index) => () => |
| 1309 { |
| 1310 let next = () => |
| 1311 { |
| 1312 if (index == ruleGroups.length - 1) |
| 1313 return ruleGroups.reduce((all, rules) => all.concat(rules), []); |
| 1314 }; |
| 1315 |
| 1316 if (this.options.merge == "all" || |
| 1317 (this.options.merge == "auto" && |
| 1318 ruleGroups.reduce((n, group) => n + group.length, 0) > 50000)) |
| 1319 { |
| 1320 return mergeRules(ruleGroups[index], this.options.merge == "all") |
| 1321 .then(rules => |
| 1322 { |
| 1323 ruleGroups[index] = rules; |
| 1324 return next(); |
| 1325 }); |
| 1326 } |
| 1327 |
| 1328 return next(); |
| 1329 }); |
768 }; | 1330 }; |
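Since generateRules() now resolves asynchronously (the merging work runs on the promise chain built by async()), calling code would presumably consume it like this (sketch):

    list.generateRules().then(rules =>
    {
      console.log(JSON.stringify(rules, null, 2));
    });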