From 32cb51ae38ff0a6a4be0baeb9238b3fe63cac337 Mon Sep 17 00:00:00 2001
From: shumingma
Date: Sat, 4 Mar 2023 01:11:34 -0800
Subject: [PATCH] v0.1.2

---
 setup.py                                       | 4 ++--
 torchscale/component/relative_position_bias.py | 1 +
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index 4593330..5ee67a7 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ from setuptools import find_packages, setup
 
 setup(
     name="torchscale",
-    version="0.1.1",
+    version="0.1.2",
     author="TorchScale Team",
     author_email="Shuming.Ma@microsoft.com",
     description="Transformers at any scale",
@@ -15,7 +15,7 @@ setup(
     long_description_content_type="text/markdown",
     keywords="Transformers at any scale",
     license="MIT",
-    url="https://github.com/msranlp/torchscale",
+    url="https://github.com/microsoft/torchscale",
     packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
     install_requires=["torch>=1.8", "fairscale==0.4.0", "timm==0.4.12"],
     python_requires=">=3.8.0",

diff --git a/torchscale/component/relative_position_bias.py b/torchscale/component/relative_position_bias.py
index ed29e4e..e9686f0 100644
--- a/torchscale/component/relative_position_bias.py
+++ b/torchscale/component/relative_position_bias.py
@@ -63,6 +63,7 @@ class RelativePositionBias(nn.Module):
             relative_position,  # shape (qlen, klen)
             bidirectional=self.bidirectional,
             num_buckets=self.num_buckets,
+            max_distance=self.max_distance,
         )
         rp_bucket = rp_bucket.to(self.relative_attention_bias.weight.device)
         values = self.relative_attention_bias(
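
Note on the relative_position_bias.py hunk: before this patch, compute_bias called
self._relative_position_bucket without forwarding max_distance, so bucketing silently
fell back to the method's default (128 in the usual T5-style signature) even when
RelativePositionBias was constructed with a different max_distance. The sketch below
illustrates that failure mode; it is a minimal, self-contained approximation, and
relative_position_bucket here is a hypothetical free function mirroring the T5-style
bucketing scheme, not torchscale's exact internals.

import math

import torch


def relative_position_bucket(relative_position, bidirectional=True,
                             num_buckets=32, max_distance=128):
    # Hypothetical standalone mirror of a T5-style _relative_position_bucket:
    # small offsets get exact buckets; larger offsets share logarithmically
    # spaced buckets up to max_distance, beyond which they clamp to the top bucket.
    ret = 0
    n = -relative_position
    if bidirectional:
        # Half the buckets encode direction (before vs. after the query position).
        num_buckets //= 2
        ret += (n < 0).to(torch.long) * num_buckets
        n = torch.abs(n)
    else:
        n = torch.max(n, torch.zeros_like(n))
    max_exact = num_buckets // 2
    is_small = n < max_exact
    # clamp(min=1) only affects n == 0, which takes the is_small branch anyway;
    # it just avoids log(0) producing -inf before the cast.
    val_if_large = max_exact + (
        torch.log(n.float().clamp(min=1) / max_exact)
        / math.log(max_distance / max_exact)
        * (num_buckets - max_exact)
    ).to(torch.long)
    val_if_large = torch.min(
        val_if_large, torch.full_like(val_if_large, num_buckets - 1)
    )
    ret += torch.where(is_small, n, val_if_large)
    return ret


# With the kwarg omitted (pre-patch behavior), a module configured with
# max_distance=256 still buckets as if max_distance were 128:
rel = torch.arange(-300, 301)[None, :]  # relative positions, shape (1, 601)
buckets_default = relative_position_bucket(rel, num_buckets=32)  # implicit 128
buckets_intended = relative_position_bucket(rel, num_buckets=32, max_distance=256)
print((buckets_default != buckets_intended).sum().item(),
      "positions bucketed differently")

Forwarding max_distance, as the one-line fix does, lets the configured value take
effect; without it, any offset beyond the default 128 collapses into the top bucket
earlier than intended, flattening the learned bias for long-range positions.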