@@ 0,0 1,101 @@
+From d5d3351836fde35cbbad374d50685353eb38ddbc Mon Sep 17 00:00:00 2001
+From: Drew DeVault <sir@cmpwn.com>
+Date: Fri, 4 Feb 2022 13:09:04 +0100
+Subject: [PATCH] lexers: add Hare
+
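+Hare is a systems programming language (https://harelang.org). This adds a
+RegexLexer for it and registers the 'hare' alias, the '*.ha' filename
+pattern, and the 'text/x-hare' mimetype in the lexer mapping.
+
+A minimal way to exercise the new lexer once the patch is applied (the Hare
+snippet below is only an illustrative sample):
+
+    from pygments import highlight
+    from pygments.lexers import HareLexer
+    from pygments.formatters import TerminalFormatter
+
+    code = 'export fn main() void = {\n\treturn;\n};\n'
+    print(highlight(code, HareLexer(), TerminalFormatter()))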
+---
+ pygments/lexers/_mapping.py | 1 +
+ pygments/lexers/hare.py | 69 +++++++++++++++++++++++++++++++++++++
+ 2 files changed, 70 insertions(+)
+ create mode 100644 pygments/lexers/hare.py
+
+diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
+index 40120a87..873b30f8 100644
+--- a/pygments/lexers/_mapping.py
++++ b/pygments/lexers/_mapping.py
+@@ -198,6 +198,7 @@ LEXERS = {
+ 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
+ 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
+ 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
++ 'HareLexer': ('pygments.lexers.hare', 'Hare', ('hare',), ('*.ha',), ('text/x-hare',)),
+ 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
+ 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
+ 'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
+diff --git a/pygments/lexers/hare.py b/pygments/lexers/hare.py
+new file mode 100644
+index 00000000..f501d670
+--- /dev/null
++++ b/pygments/lexers/hare.py
+@@ -0,0 +1,69 @@
++"""
++ :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
++ :license: BSD, see LICENSE for details.
++"""
++
++import re
++
++from pygments.lexer import RegexLexer, include, bygroups, inherit, words, \
++ default
++from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
++ Number, Punctuation
++
++__all__ = ['HareLexer']
++
++class HareLexer(RegexLexer):
++ name = 'Hare'
++ aliases = ['hare']
++ filenames = ['*.ha']
++ mimetypes = ['text/x-hare']
++
++ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
++ _ws1 = r'\s*(?:/[*].*?[*]/\s*)?'
++
++ tokens = {
++ 'whitespace': [
++ (r'^use.*;', Comment.Preproc),
++ (r'@[a-z]+', Comment.Preproc),
++ (r'\n', Text),
++ (r'\s+', Text),
++ (r'//.*?$', Comment.Single),
++ ],
++ 'statements': [
++ (r'(L?)(")', bygroups(String.Affix, String), 'string'),
++ (r"(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')",
++ bygroups(String.Affix, String.Char, String.Char, String.Char)),
++ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
++ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
++ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
++ (r'0o[0-7]+[LlUu]*', Number.Oct),
++ (r'\d+[zui]?(\d+)?', Number.Integer),
++ (r'[~!%^&*+=|?:<>/-]', Operator),
++ (words(('as', 'is'), suffix=r'\b'), Operator),
++ (r'[()\[\],.{};]', Punctuation),
++ (words(('abort', 'alloc', 'append', 'assert', 'case', 'char',
++ 'const', 'def', 'defer', 'delete', 'else', 'enum', 'export',
++ 'fn', 'for', 'free', 'if', 'let', 'len', 'match', 'offset',
++ 'return', 'static', 'struct', 'switch', 'type', 'union',
++ 'yield'),
++ suffix=r'\b'), Keyword),
++ (r'(bool|int|uint|uintptr|u8|u16|u32|u64|i8|i16|i32|i64|f32|f64|void|nullable|rune|size)\b',
++ Keyword.Type),
++ (r'(true|false|null)\b', Name.Builtin),
++ (r'[a-zA-Z_]\w*', Name),
++ ],
++ 'string': [
++ (r'"', String, '#pop'),
++ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
++ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
++ (r'[^\\"\n]+', String), # all other characters
++ (r'\\', String), # stray backslash
++ ],
++ 'root': [
++ include('whitespace'),
++ include('statements'),
++ ],
++ }
++
++ def __init__(self, **options):
++ RegexLexer.__init__(self, **options)
+--
+2.35.1
+