CI/repo changes for shark-turbine to iree-turbine rename (#260)
This commit makes the necessary changes to adapt to the new iree-turbine
namespace.
Related PR: iree-org/iree-turbine#197

Signed-off-by: saienduri <[email protected]>
saienduri authored Oct 7, 2024
1 parent daa3f19 commit 9ca19a5
Showing 33 changed files with 39 additions and 39 deletions.
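As an aside for downstream consumers: code that has to run against either package during the transition can shim the import, roughly as sketched below (an illustration, not part of this commit; the fallback assumes the legacy shark-turbine egg may still be installed in some environments).

```python
# Illustration only, not part of this commit: prefer the new iree.turbine
# namespace installed by the iree-turbine egg, and fall back to the legacy
# shark_turbine namespace if only the old shark-turbine egg is present.
try:
    from iree.turbine import aot  # new namespace
except ImportError:
    from shark_turbine import aot  # legacy namespace

# Downstream code keeps using `aot` the same way in either case.
print(aot.__name__)
```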
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -54,7 +54,7 @@ jobs:
# wheels saves multiple minutes and a lot of bandwidth on runner setup.
pip install --no-compile -r pytorch-cpu-requirements.txt
pip install --no-compile -f https://iree.dev/pip-release-links.html --src deps \
-e "git+https://github.com/iree-org/iree-turbine.git#egg=shark-turbine"
-e "git+https://github.com/iree-org/iree-turbine.git#egg=iree-turbine"
pip install --no-compile -r requirements.txt -e sharktank/
- name: Run sharktank tests
4 changes: 2 additions & 2 deletions .github/workflows/test.yaml
@@ -53,7 +53,7 @@ jobs:
# wheels saves multiple minutes and a lot of bandwidth on runner setup.
pip install --no-compile -r pytorch-cpu-requirements.txt
pip install --no-compile -f https://iree.dev/pip-release-links.html --src deps \
-e "git+https://github.com/iree-org/iree-turbine.git#egg=shark-turbine"
-e "git+https://github.com/iree-org/iree-turbine.git#egg=iree-turbine"
pip install --no-compile -r requirements.txt -e sharktank/ shortfin/
# Try with the latest nightly releases, not what iree-turbine pins.
@@ -85,7 +85,7 @@ jobs:
python -m pip install --no-compile --upgrade pip
pip install --no-compile -r pytorch-rocm-requirements.txt
pip install --no-compile -f https://iree.dev/pip-release-links.html --src deps \
-e "git+https://github.com/iree-org/iree-turbine.git#egg=shark-turbine"
-e "git+https://github.com/iree-org/iree-turbine.git#egg=iree-turbine"
pip install --no-compile -r requirements.txt -e sharktank/ shortfin/
- name: Run punet tests
2 changes: 1 addition & 1 deletion README.md
@@ -45,7 +45,7 @@ pip install -r pytorch-rocm-requirements.txt
```
# Clone and install editable iree-turbine dep in deps/
pip install -f https://iree.dev/pip-release-links.html --src deps \
-e "git+https://github.com/iree-org/iree-turbine.git#egg=shark-turbine"
-e "git+https://github.com/iree-org/iree-turbine.git#egg=iree-turbine"
# Install editable local projects.
pip install -r requirements.txt -e sharktank/ shortfin/
2 changes: 1 addition & 1 deletion docs/model_cookbook.md
@@ -176,7 +176,7 @@ source .venv/bin/activate
# Install requirements.
pip install -r pytorch-cpu-requirements.txt
pip install -f https://iree.dev/pip-release-links.html --src deps \
-e "git+https://github.com/iree-org/iree-turbine.git#egg=shark-turbine"
-e "git+https://github.com/iree-org/iree-turbine.git#egg=iree-turbine"

# Install local projects.
pip install -r requirements.txt -e sharktank/ shortfin/
6 changes: 3 additions & 3 deletions docs/quantization.md
@@ -277,9 +277,9 @@ is everything). We're just starting to exploit some of this as the PyTorch
level. Some examples:

* Something as simple as a humble runtime
- [tensor trace/print](https://github.com/iree-org/iree-turbine/blob/main/shark_turbine/ops/iree.py#L52)
- * [Simple linalg based template expansion](https://github.com/iree-org/iree-turbine/blob/main/shark_turbine/ops/_jinja_test_ops.py#L28)
- (see backing example [jinja template](https://github.com/iree-org/iree-turbine/blob/main/shark_turbine/ops/templates/test_add_jinja.mlir)).
+ [tensor trace/print](https://github.com/iree-org/iree-turbine/blob/main/iree.turbine/ops/iree.py#L52)
+ * [Simple linalg based template expansion](https://github.com/iree-org/iree-turbine/blob/main/iree.turbine/ops/_jinja_test_ops.py#L28)
+ (see backing example [jinja template](https://github.com/iree-org/iree-turbine/blob/main/iree.turbine/ops/templates/test_add_jinja.mlir)).
* Optimal linalg-based [8-bit block scaled mmt for weight compression](https://github.com/nod-ai/sharktank/blob/main/sharktank/sharktank/kernels/mmt_block_scaled_q8.py)
(see backing [jinja template](https://github.com/nod-ai/sharktank/blob/main/sharktank/sharktank/kernels/templates/mmt_block_scaled_q8_3d.mlir)).
* DSL based [like this fused attention kernel](https://github.com/iree-org/iree-turbine/blob/main/tests/kernel/fused_attention_test.py#L20)
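As a rough usage sketch of the runtime tensor trace op linked above, under the renamed namespace; the `trace_tensor` name and its eager behavior are assumptions based on the linked file's description, not something verified by this diff.

```python
import torch
import iree.turbine.ops.iree as iree_ops  # was shark_turbine.ops.iree before the rename

# Hypothetical usage of the "tensor trace/print" op referenced above;
# the op name and call signature are assumptions, not confirmed here.
x = torch.randn(2, 3)
iree_ops.trace_tensor("activations", x)
```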
2 changes: 1 addition & 1 deletion sharktank/setup.py
@@ -95,7 +95,7 @@ def initialize_options(self):
"sharktank": ["py.typed", "kernels/templates/*.mlir"],
},
install_requires=[
"shark-turbine",
"iree-turbine",
],
extras_require={
"testing": [
2 changes: 1 addition & 1 deletion sharktank/sharktank/examples/export_paged_llm_v1.py
@@ -9,7 +9,7 @@
import json
import torch

- from shark_turbine.aot import *
+ from iree.turbine.aot import *

from sharktank.layers import *
from sharktank.types import *
2 changes: 1 addition & 1 deletion sharktank/sharktank/examples/sharding/export_ffn_net.py
@@ -89,7 +89,7 @@ def main(raw_args=None):
ds = Dataset.load(args.output_irpa_file)

mdl = ShardedFFN(ds.root_theta)
- from shark_turbine import aot
+ from iree.turbine import aot

example_arg = torch.empty(bs, sl, primary_dim, dtype=torch.float16)
ep = torch.export.export(mdl, (example_arg,))
2 changes: 1 addition & 1 deletion sharktank/sharktank/examples/sharding/export_gemm.py
@@ -4,7 +4,7 @@
import torch
from torch import Tensor
from sharktank import ops
- from shark_turbine import aot
+ from iree.turbine import aot


def export_gemm(
2 changes: 1 addition & 1 deletion sharktank/sharktank/export_layer/export_moe.py
@@ -5,7 +5,7 @@
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

import torch
- from shark_turbine.aot import *
+ from iree.turbine.aot import *
from sharktank.models.llama.testing import make_moe_block_theta, make_rand_torch
from sharktank.layers.mixture_of_experts_block import PreGatherMoeBlock
from ..utils import cli
2 changes: 1 addition & 1 deletion sharktank/sharktank/export_layer/export_paged_attention.py
@@ -13,7 +13,7 @@

import torch.nn.functional as F

- from shark_turbine.aot import *
+ from iree.turbine.aot import *

from sharktank.layers import *
from sharktank.types import *
6 changes: 3 additions & 3 deletions sharktank/sharktank/kernels/base.py
@@ -12,7 +12,7 @@

from jinja2 import Environment, PackageLoader, select_autoescape

- from shark_turbine.support.ir_imports import (
+ from iree.turbine.support.ir_imports import (
FlatSymbolRefAttr,
FunctionType,
IrType,
@@ -24,15 +24,15 @@
Value,
)

- from shark_turbine.runtime.op_reg import (
+ from iree.turbine.runtime.op_reg import (
def_library,
CustomOp,
KernelBuilder,
KernelSelection,
TensorArg,
)

- from shark_turbine.transforms.merger import Merger
+ from iree.turbine.transforms.merger import Merger

from ..utils.logging import get_logger

2 changes: 1 addition & 1 deletion sharktank/sharktank/models/punet/tools/run_punet.py
@@ -9,7 +9,7 @@

import torch

- from shark_turbine import aot
+ from iree.turbine import aot

from ..model import Unet2DConditionModel, ClassifierFreeGuidanceUnetModel
from ....utils.patching import SaveModuleResultTensorsPatch
4 changes: 2 additions & 2 deletions sharktank/sharktank/ops/default_impls.py
@@ -24,7 +24,7 @@
from ..types.tensors import unbox_tensor, AnyTensor
from ._registry import AllOfType, AllOfExprs, AllOfExprsVariadic, IsOfType
from .signatures import *
- import shark_turbine.ops.iree
+ import iree.turbine.ops.iree


@cat.override(AllOfType(Tensor, PrimitiveTensor))
@@ -393,7 +393,7 @@ def to_default(tensor: Tensor, *args, **kwargs):

@transfer_to_logical_device.override(Tensor)
def transfer_to_logical_device_default(tensor: Tensor, ordinal: int):
- return shark_turbine.ops.iree.transfer_to_logical_device(
+ return iree.turbine.ops.iree.transfer_to_logical_device(
f"{ordinal}", unbox_tensor(tensor)
)

2 changes: 1 addition & 1 deletion sharktank/sharktank/types/gguf_interop/base.py
@@ -13,7 +13,7 @@

from gguf import GGUFReader, GGUFValueType

- from shark_turbine.aot import (
+ from iree.turbine.aot import (
ExternalTensorTrait,
)

2 changes: 1 addition & 1 deletion sharktank/sharktank/types/tensors.py
@@ -27,7 +27,7 @@
from torch import Tensor
from torch.utils._pytree import register_pytree_node, SequenceKey
from ..utils.math import ceildiv
- from shark_turbine.aot import (
+ from iree.turbine.aot import (
ExternalTensorTrait,
)
from ..utils import tree as tree_utils
2 changes: 1 addition & 1 deletion sharktank/sharktank/types/theta.py
@@ -15,7 +15,7 @@
import torch
import torch.nn.functional as F

- from shark_turbine.aot import (
+ from iree.turbine.aot import (
ExternalTensorTrait,
ParameterArchive,
ParameterArchiveEntry,
2 changes: 1 addition & 1 deletion sharktank/sharktank/utils/io.py
@@ -6,7 +6,7 @@

from pathlib import Path

- from shark_turbine.aot import (
+ from iree.turbine.aot import (
ParameterArchiveBuilder,
)

2 changes: 1 addition & 1 deletion sharktank/sharktank/utils/logging.py
@@ -6,7 +6,7 @@

import logging

- from shark_turbine.support.logging import get_logger
+ from iree.turbine.support.logging import get_logger


transform_logger: logging.Logger = get_logger("sharktank.transforms")
2 changes: 1 addition & 1 deletion sharktank/tests/kernels/batch_matmul_transpose_b_test.py
@@ -13,7 +13,7 @@

import torch

- from shark_turbine import aot
+ from iree.turbine import aot
from sharktank import kernels


2 changes: 1 addition & 1 deletion sharktank/tests/kernels/conv_2d_nchw_fchw_test.py
@@ -13,7 +13,7 @@

import torch

- from shark_turbine import aot
+ from iree.turbine import aot
from sharktank import kernels
from sharktank.ops.qconv_impls import _pad_last_2d

2 changes: 1 addition & 1 deletion sharktank/tests/kernels/einsum_q4_test.py
@@ -13,7 +13,7 @@

import torch

- from shark_turbine import aot
+ from iree.turbine import aot
from sharktank import kernels
from sharktank.types import layout_utils

2 changes: 1 addition & 1 deletion sharktank/tests/kernels/mmt_block_scaled_offset_q4_test.py
@@ -13,7 +13,7 @@

import torch

- from shark_turbine import aot
+ from iree.turbine import aot
from sharktank import kernels
from sharktank.types import layout_utils

2 changes: 1 addition & 1 deletion sharktank/tests/kernels/mmt_block_scaled_q8_test.py
@@ -13,7 +13,7 @@

import torch

- from shark_turbine import aot
+ from iree.turbine import aot
from sharktank import kernels


(file name not shown)
@@ -13,7 +13,7 @@

import torch

- from shark_turbine import aot
+ from iree.turbine import aot
from sharktank import kernels
from sharktank.types import layout_utils

2 changes: 1 addition & 1 deletion sharktank/tests/kernels/mmtfp_test.py
@@ -13,7 +13,7 @@

import torch

- from shark_turbine import aot
+ from iree.turbine import aot
from sharktank import kernels


2 changes: 1 addition & 1 deletion sharktank/tests/kernels/pooling_nchw_sum_test.py
@@ -13,7 +13,7 @@

import torch

- from shark_turbine import aot
+ from iree.turbine import aot
from sharktank import kernels
from sharktank.ops.qconv_impls import _pad_last_2d

2 changes: 1 addition & 1 deletion sharktank/tests/layers/sharded_conv2d_with_iree_test.py
@@ -9,7 +9,7 @@
from pathlib import Path
import tempfile
import torch
- from shark_turbine import aot
+ from iree.turbine import aot
from sharktank.models.punet.layers import Conv2DLayer
from sharktank import ops
from sharktank.types import (
2 changes: 1 addition & 1 deletion sharktank/tests/models/llama/moe_block_test.py
@@ -8,7 +8,7 @@
from typing import List

import torch
- from shark_turbine.aot import *
+ from iree.turbine.aot import *
from sharktank.models.llama.testing import make_moe_block_theta, make_rand_torch
from sharktank.layers.mixture_of_experts_block import PreGatherMoeBlock
from sharktank import ops
2 changes: 1 addition & 1 deletion sharktank/tests/models/llama/sharded_llama_test.py
@@ -24,7 +24,7 @@
import tempfile
import torch
from copy import deepcopy
- from shark_turbine.aot import FxProgramsBuilder, export
+ from iree.turbine.aot import FxProgramsBuilder, export
import iree.runtime
from pathlib import Path

(file name not shown)
@@ -10,7 +10,7 @@
import torch


- from shark_turbine import aot
+ from iree.turbine import aot
from sharktank.models.punet.testing import make_resnet_block_2d_theta
from sharktank.models.punet.layers import ResnetBlock2D
from sharktank.models.punet.sharding import ResnetBlock2DSplitOutputChannelsSharding
2 changes: 1 addition & 1 deletion sharktank/tests/types/dataset_test.py
@@ -11,7 +11,7 @@

import torch

- from shark_turbine.aot import ExternalTensorTrait
+ from iree.turbine.aot import ExternalTensorTrait
from sharktank.types import *


2 changes: 1 addition & 1 deletion turbine-requirements.txt
@@ -1 +1 @@
-e "git+https://github.com/iree-org/iree-turbine.git#egg=shark-turbine"
-e "git+https://github.com/iree-org/iree-turbine.git#egg=iree-turbine"

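As a rough post-install check (an illustration, not part of this commit), the modules touched throughout this diff can be verified to resolve under the new namespace:

```python
# Illustration only: confirm the renamed namespace imports cleanly.
# Both module paths below appear in the updated sources in this commit.
import importlib

for name in ("iree.turbine.aot", "iree.turbine.ops.iree"):
    module = importlib.import_module(name)
    print(f"ok: {name} -> {module.__file__}")
```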